Dec 03 12:05:01 crc systemd[1]: Starting Kubernetes Kubelet... Dec 03 12:05:01 crc restorecon[4533]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:05:01 crc restorecon[4533]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:01 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc 
restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:05:02 crc 
restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc 
restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc 
restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 
crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 
12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 
12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:05:02 crc 
restorecon[4533]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc 
restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:05:02 crc restorecon[4533]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 12:05:02 crc kubenswrapper[4591]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 12:05:02 crc kubenswrapper[4591]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 12:05:02 crc kubenswrapper[4591]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 12:05:02 crc kubenswrapper[4591]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 03 12:05:02 crc kubenswrapper[4591]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 03 12:05:02 crc kubenswrapper[4591]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.761526 4591 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764844 4591 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764861 4591 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764866 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764870 4591 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764873 4591 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764878 4591 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764883 4591 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764888 4591 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764892 4591 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764895 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764898 4591 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764902 4591 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764905 4591 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764908 4591 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764911 4591 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764915 4591 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764918 4591 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764922 4591 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764925 4591 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764928 4591 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764931 4591 feature_gate.go:330] 
unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764935 4591 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764938 4591 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764941 4591 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764944 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764947 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764950 4591 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764970 4591 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764975 4591 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764980 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764984 4591 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764987 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764990 4591 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.764995 4591 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765000 4591 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765003 4591 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765006 4591 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765010 4591 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765014 4591 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765018 4591 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765022 4591 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765025 4591 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765028 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765031 4591 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765035 4591 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765040 4591 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. 
It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765044 4591 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765048 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765051 4591 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765054 4591 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765057 4591 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765073 4591 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765077 4591 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765080 4591 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765084 4591 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765089 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765092 4591 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765095 4591 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765098 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765101 4591 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765104 4591 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765108 4591 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765111 4591 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765114 4591 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765117 4591 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765120 4591 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765124 4591 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765127 4591 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765130 4591 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765134 4591 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765138 4591 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 
12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765210 4591 flags.go:64] FLAG: --address="0.0.0.0" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765219 4591 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765226 4591 flags.go:64] FLAG: --anonymous-auth="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765232 4591 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765237 4591 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765240 4591 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765252 4591 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765258 4591 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765262 4591 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765265 4591 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765269 4591 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765273 4591 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765277 4591 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765281 4591 flags.go:64] FLAG: --cgroup-root="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765285 4591 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765289 4591 flags.go:64] FLAG: --client-ca-file="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765292 4591 flags.go:64] FLAG: --cloud-config="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765296 4591 flags.go:64] FLAG: --cloud-provider="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765299 4591 flags.go:64] FLAG: --cluster-dns="[]" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765304 4591 flags.go:64] FLAG: --cluster-domain="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765307 4591 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765311 4591 flags.go:64] FLAG: --config-dir="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765315 4591 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765319 4591 flags.go:64] FLAG: --container-log-max-files="5" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765324 4591 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765328 4591 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765332 4591 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765336 4591 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765340 4591 flags.go:64] FLAG: --contention-profiling="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 
12:05:02.765344 4591 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765347 4591 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765352 4591 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765356 4591 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765361 4591 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765365 4591 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765369 4591 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765372 4591 flags.go:64] FLAG: --enable-load-reader="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765376 4591 flags.go:64] FLAG: --enable-server="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765380 4591 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765386 4591 flags.go:64] FLAG: --event-burst="100" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765390 4591 flags.go:64] FLAG: --event-qps="50" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765393 4591 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765397 4591 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765401 4591 flags.go:64] FLAG: --eviction-hard="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765406 4591 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765410 4591 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765414 4591 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765417 4591 flags.go:64] FLAG: --eviction-soft="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765421 4591 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765424 4591 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765428 4591 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765432 4591 flags.go:64] FLAG: --experimental-mounter-path="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765436 4591 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765440 4591 flags.go:64] FLAG: --fail-swap-on="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765444 4591 flags.go:64] FLAG: --feature-gates="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765450 4591 flags.go:64] FLAG: --file-check-frequency="20s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765454 4591 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765458 4591 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765462 4591 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 
12:05:02.765466 4591 flags.go:64] FLAG: --healthz-port="10248" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765470 4591 flags.go:64] FLAG: --help="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765473 4591 flags.go:64] FLAG: --hostname-override="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765477 4591 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765481 4591 flags.go:64] FLAG: --http-check-frequency="20s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765485 4591 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765489 4591 flags.go:64] FLAG: --image-credential-provider-config="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765493 4591 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765497 4591 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765501 4591 flags.go:64] FLAG: --image-service-endpoint="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765504 4591 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765508 4591 flags.go:64] FLAG: --kube-api-burst="100" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765512 4591 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765516 4591 flags.go:64] FLAG: --kube-api-qps="50" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765520 4591 flags.go:64] FLAG: --kube-reserved="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765524 4591 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765528 4591 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765532 4591 flags.go:64] FLAG: --kubelet-cgroups="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765535 4591 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765539 4591 flags.go:64] FLAG: --lock-file="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765548 4591 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765552 4591 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765556 4591 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765566 4591 flags.go:64] FLAG: --log-json-split-stream="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765570 4591 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765574 4591 flags.go:64] FLAG: --log-text-split-stream="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765579 4591 flags.go:64] FLAG: --logging-format="text" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765582 4591 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765587 4591 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765590 4591 flags.go:64] FLAG: --manifest-url="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765594 4591 
flags.go:64] FLAG: --manifest-url-header="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765602 4591 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765605 4591 flags.go:64] FLAG: --max-open-files="1000000" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765610 4591 flags.go:64] FLAG: --max-pods="110" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765614 4591 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765618 4591 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765622 4591 flags.go:64] FLAG: --memory-manager-policy="None" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765627 4591 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765631 4591 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765636 4591 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765639 4591 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765648 4591 flags.go:64] FLAG: --node-status-max-images="50" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765652 4591 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765656 4591 flags.go:64] FLAG: --oom-score-adj="-999" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765660 4591 flags.go:64] FLAG: --pod-cidr="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765664 4591 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765669 4591 flags.go:64] FLAG: --pod-manifest-path="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765673 4591 flags.go:64] FLAG: --pod-max-pids="-1" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765677 4591 flags.go:64] FLAG: --pods-per-core="0" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765681 4591 flags.go:64] FLAG: --port="10250" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765685 4591 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765688 4591 flags.go:64] FLAG: --provider-id="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765693 4591 flags.go:64] FLAG: --qos-reserved="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765697 4591 flags.go:64] FLAG: --read-only-port="10255" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765701 4591 flags.go:64] FLAG: --register-node="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765704 4591 flags.go:64] FLAG: --register-schedulable="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765709 4591 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765715 4591 flags.go:64] FLAG: --registry-burst="10" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765720 4591 flags.go:64] FLAG: --registry-qps="5" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765723 4591 flags.go:64] 
FLAG: --reserved-cpus="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765727 4591 flags.go:64] FLAG: --reserved-memory="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765732 4591 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765747 4591 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765751 4591 flags.go:64] FLAG: --rotate-certificates="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765755 4591 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765759 4591 flags.go:64] FLAG: --runonce="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765762 4591 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765766 4591 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765771 4591 flags.go:64] FLAG: --seccomp-default="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765775 4591 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765779 4591 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765783 4591 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765787 4591 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765790 4591 flags.go:64] FLAG: --storage-driver-password="root" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765794 4591 flags.go:64] FLAG: --storage-driver-secure="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765798 4591 flags.go:64] FLAG: --storage-driver-table="stats" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765801 4591 flags.go:64] FLAG: --storage-driver-user="root" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765805 4591 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765809 4591 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765813 4591 flags.go:64] FLAG: --system-cgroups="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765816 4591 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765822 4591 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765825 4591 flags.go:64] FLAG: --tls-cert-file="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765829 4591 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765833 4591 flags.go:64] FLAG: --tls-min-version="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765837 4591 flags.go:64] FLAG: --tls-private-key-file="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765840 4591 flags.go:64] FLAG: --topology-manager-policy="none" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765844 4591 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765848 4591 flags.go:64] FLAG: --topology-manager-scope="container" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765852 4591 flags.go:64] 
FLAG: --v="2" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765858 4591 flags.go:64] FLAG: --version="false" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765866 4591 flags.go:64] FLAG: --vmodule="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765871 4591 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.765875 4591 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765967 4591 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765972 4591 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765976 4591 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765979 4591 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765983 4591 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765986 4591 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765990 4591 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765993 4591 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.765997 4591 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766000 4591 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766003 4591 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766007 4591 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766010 4591 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766013 4591 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766016 4591 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766020 4591 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766023 4591 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766026 4591 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766029 4591 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766032 4591 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766036 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766039 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766042 4591 
feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766045 4591 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766049 4591 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766053 4591 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766057 4591 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766085 4591 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766090 4591 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766095 4591 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766098 4591 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766102 4591 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766107 4591 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766111 4591 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766116 4591 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766120 4591 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766124 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766128 4591 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766131 4591 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766134 4591 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766138 4591 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766141 4591 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766144 4591 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766147 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766150 4591 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766153 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766156 4591 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 
12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766160 4591 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766163 4591 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766166 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766169 4591 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766172 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766176 4591 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766179 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766182 4591 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766186 4591 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766189 4591 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766192 4591 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766196 4591 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766200 4591 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766204 4591 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766209 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766212 4591 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766215 4591 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766219 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766222 4591 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766226 4591 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766229 4591 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766232 4591 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766236 4591 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.766240 4591 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.766251 4591 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false 
EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.772223 4591 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.772246 4591 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772295 4591 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772300 4591 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772305 4591 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772309 4591 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772312 4591 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772315 4591 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772319 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772324 4591 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772328 4591 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772333 4591 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772336 4591 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772339 4591 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772343 4591 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772346 4591 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772350 4591 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772353 4591 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772356 4591 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772360 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772363 4591 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772366 4591 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772369 4591 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772372 4591 feature_gate.go:330] 
unrecognized feature gate: VSphereMultiNetworks Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772376 4591 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772379 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772383 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772386 4591 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772390 4591 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772393 4591 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772397 4591 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772400 4591 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772404 4591 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772409 4591 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772413 4591 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772417 4591 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772421 4591 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772425 4591 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772428 4591 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772432 4591 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772437 4591 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772441 4591 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772445 4591 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772449 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772453 4591 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772457 4591 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772460 4591 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772464 4591 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772467 4591 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772471 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772474 4591 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772478 4591 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772482 4591 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772485 4591 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772489 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772493 4591 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772497 4591 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772500 4591 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772503 4591 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772506 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772511 4591 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772515 4591 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772518 4591 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772522 4591 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772525 4591 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772529 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772532 4591 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772535 4591 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772538 4591 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772542 4591 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772546 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772550 4591 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772557 4591 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.772563 4591 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772666 4591 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772671 4591 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772675 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772679 4591 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772683 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772686 4591 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772689 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772692 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772696 4591 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 
12:05:02.772699 4591 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772703 4591 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772706 4591 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772711 4591 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772715 4591 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772719 4591 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772722 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772725 4591 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772728 4591 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772733 4591 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772744 4591 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772748 4591 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772751 4591 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772755 4591 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772758 4591 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772762 4591 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772765 4591 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772768 4591 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772771 4591 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772775 4591 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772778 4591 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772781 4591 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772784 4591 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772787 4591 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772790 4591 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772795 
4591 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772798 4591 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772801 4591 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772804 4591 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772808 4591 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772811 4591 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772814 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772818 4591 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772823 4591 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772828 4591 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772832 4591 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772836 4591 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772840 4591 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772843 4591 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772846 4591 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772849 4591 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772853 4591 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772856 4591 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772859 4591 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772863 4591 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772866 4591 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772870 4591 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772873 4591 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772877 4591 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772881 4591 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772884 4591 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstallIBMCloud Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772887 4591 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772891 4591 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772894 4591 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772897 4591 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772900 4591 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772903 4591 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772907 4591 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772911 4591 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772914 4591 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772918 4591 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.772922 4591 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.772926 4591 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.773018 4591 server.go:940] "Client rotation is on, will bootstrap in background" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.775596 4591 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.775673 4591 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.776452 4591 server.go:997] "Starting client certificate rotation" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.776476 4591 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.777186 4591 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-13 21:57:16.324472727 +0000 UTC Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.777243 4591 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 993h52m13.547231516s for next certificate rotation Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.788723 4591 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.790210 4591 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.801984 4591 log.go:25] "Validated CRI v1 runtime API" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.819179 4591 log.go:25] "Validated CRI v1 image API" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.820240 4591 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.823347 4591 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-12-01-48-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.823376 4591 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}] Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.835778 4591 manager.go:217] Machine: {Timestamp:2025-12-03 12:05:02.834487336 +0000 UTC m=+0.261527126 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445404 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:ad5d5022-d9e3-4192-a6ae-548c1b27699e BootID:b9815d2a-4e84-4a36-9a52-7c608e704615 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 
HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:84:17:a3 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:84:17:a3 Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:4f:04:50 Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:1c:28:74 Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:11:9d:29 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:59:27:47 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:ea:6e:70:57:0c:c6 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:0a:fe:5c:0f:de:9e Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 
Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.835948 4591 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.836035 4591 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.836284 4591 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.836420 4591 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.836449 4591 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.836615 4591 topology_manager.go:138] "Creating topology manager with none policy" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.836624 4591 container_manager_linux.go:303] "Creating device plugin manager" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.837093 4591 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.837120 4591 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.837589 4591 state_mem.go:36] "Initialized new in-memory state store" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.837664 4591 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.839716 4591 kubelet.go:418] "Attempting to sync node with API server" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.839740 4591 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.839760 4591 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.839770 4591 kubelet.go:324] "Adding apiserver pod source" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.839778 4591 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.841777 4591 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.842293 4591 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.843433 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.843745 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.843439 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.843809 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.844769 4591 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.845909 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.845984 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846035 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846099 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846151 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846194 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846246 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846294 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846337 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846386 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846450 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.846497 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.847186 4591 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 
12:05:02.847571 4591 server.go:1280] "Started kubelet" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.847774 4591 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.847801 4591 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.847942 4591 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.848156 4591 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 03 12:05:02 crc systemd[1]: Started Kubernetes Kubelet. Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.849732 4591 server.go:460] "Adding debug handlers to kubelet server" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.850626 4591 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.850651 4591 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.851148 4591 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 03:23:43.230645383 +0000 UTC Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.851177 4591 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 879h18m40.379470682s for next certificate rotation Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.851225 4591 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.851236 4591 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.851324 4591 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.851437 4591 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.851773 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.851840 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.851901 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="200ms" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.852788 4591 factory.go:55] Registering systemd factory Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.852852 4591 
factory.go:221] Registration of the systemd container factory successfully Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.853126 4591 factory.go:153] Registering CRI-O factory Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.853146 4591 factory.go:221] Registration of the crio container factory successfully Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.853193 4591 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.853232 4591 factory.go:103] Registering Raw factory Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.853250 4591 manager.go:1196] Started watching for new ooms in manager Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.853973 4591 manager.go:319] Starting recovery of all containers Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.855125 4591 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.19:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187db30e43cd65c0 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 12:05:02.847550912 +0000 UTC m=+0.274590681,LastTimestamp:2025-12-03 12:05:02.847550912 +0000 UTC m=+0.274590681,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.860652 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.860787 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.860850 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.860922 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.860984 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861041 4591 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861110 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861197 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861258 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861326 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861387 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861440 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861502 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861561 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861629 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861685 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861745 4591 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861802 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861852 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861911 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.861961 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862009 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862078 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862139 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862203 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862262 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862326 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862387 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862445 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862506 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862569 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862624 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862681 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862732 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862796 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862879 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862933 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.862986 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.863035 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.863100 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.863163 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.863219 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.863273 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.863321 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.863370 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864856 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864895 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864909 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864920 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864930 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" 
volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864940 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864966 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.864985 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865026 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865038 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865048 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865073 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865084 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865094 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865103 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865114 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865124 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865135 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865144 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865184 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865194 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865203 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865211 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865224 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865232 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865240 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865251 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865258 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865267 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865275 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865297 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865305 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865315 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865322 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865330 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865338 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865360 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865369 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865378 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865387 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865397 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865406 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865417 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865425 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865434 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865442 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865451 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865461 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.865471 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867248 4591 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867442 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867459 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867470 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867490 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867500 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867509 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867522 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867532 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867540 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867549 4591 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867565 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867576 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867588 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867598 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867607 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867618 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867627 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867638 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867647 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867656 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867666 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867675 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867684 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867693 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867701 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867709 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867717 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867726 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867745 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867754 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867764 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867772 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867782 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867791 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867801 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867809 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867818 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867826 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867836 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867845 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867854 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867862 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867871 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867880 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867889 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867898 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867907 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867915 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867924 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867933 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867942 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867950 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867959 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867966 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867975 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867984 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.867994 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868002 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868013 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868022 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868037 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868048 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868057 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868129 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868138 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868148 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868156 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868165 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868174 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868183 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868192 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868200 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868209 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868217 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868226 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868235 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868243 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868251 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868260 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868267 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868276 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868286 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868295 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868303 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868312 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868321 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868328 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868337 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868346 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868355 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868364 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868372 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868383 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868392 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868400 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868412 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868421 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868430 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868439 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868448 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868455 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868463 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868471 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868480 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868489 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868498 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868507 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868516 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868524 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868561 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868571 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868580 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868589 4591 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868598 4591 reconstruct.go:97] "Volume reconstruction finished" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868605 4591 reconciler.go:26] "Reconciler: start to sync state" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.868944 4591 manager.go:324] Recovery completed Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.876793 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.878104 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.878131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.878140 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.879553 4591 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.879798 4591 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.880395 4591 state_mem.go:36] "Initialized new in-memory state store" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.885138 4591 policy_none.go:49] "None policy: Start" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.886349 4591 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.886371 4591 state_mem.go:35] "Initializing new in-memory state store" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.887948 4591 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.889218 4591 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.889254 4591 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.889277 4591 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.889314 4591 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 12:05:02 crc kubenswrapper[4591]: W1203 12:05:02.890297 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.890356 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.921824 4591 manager.go:334] "Starting Device Plugin manager" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.921868 4591 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.921878 4591 server.go:79] "Starting device plugin registration server" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.922227 4591 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.922244 4591 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.922533 4591 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.922613 4591 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.922625 4591 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 12:05:02 crc kubenswrapper[4591]: E1203 12:05:02.928184 4591 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.990081 4591 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.990157 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.990959 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.990995 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991005 
4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991149 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991320 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991358 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991913 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991942 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991951 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991909 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991974 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.991983 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.992095 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.992219 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.992252 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.992891 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.992914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.992923 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.992982 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993010 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993018 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993146 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993270 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993297 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993881 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993901 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993910 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993965 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993980 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.993999 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994002 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994130 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994159 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994539 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994563 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994571 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994672 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994696 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994731 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994757 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.994767 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.995186 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.995211 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:02 crc kubenswrapper[4591]: I1203 12:05:02.995218 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.022474 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.023729 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.023766 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.023777 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.023795 4591 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:05:03 crc kubenswrapper[4591]: E1203 12:05:03.024146 4591 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.19:6443: connect: connection refused" node="crc" Dec 03 12:05:03 crc kubenswrapper[4591]: E1203 12:05:03.052503 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="400ms" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070344 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070368 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070387 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070403 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070419 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070434 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070450 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070467 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070481 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070494 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070508 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070522 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" 
(UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070534 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070548 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.070560 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171508 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171545 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171563 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171578 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171593 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171607 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171623 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171630 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171658 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171637 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171659 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171672 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171696 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171637 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171734 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171760 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171718 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171788 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171789 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171684 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171820 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171773 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171872 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171896 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171915 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171935 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171951 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171934 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171976 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.171997 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.224542 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.225238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.225289 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.225302 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.225320 4591 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:05:03 crc kubenswrapper[4591]: E1203 12:05:03.225633 4591 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.19:6443: connect: connection refused" node="crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.317832 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.323434 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: W1203 12:05:03.339084 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-4a4fe34fcdcccf249585d84ae29a25174a86a4f32d7047a3adb31cd1e4986046 WatchSource:0}: Error finding container 4a4fe34fcdcccf249585d84ae29a25174a86a4f32d7047a3adb31cd1e4986046: Status 404 returned error can't find the container with id 4a4fe34fcdcccf249585d84ae29a25174a86a4f32d7047a3adb31cd1e4986046 Dec 03 12:05:03 crc kubenswrapper[4591]: W1203 12:05:03.341178 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-ba325438e32b4b9d94ebc3b58f83272a8aae9ea767eb57ade68926043ced8a0e WatchSource:0}: Error finding container ba325438e32b4b9d94ebc3b58f83272a8aae9ea767eb57ade68926043ced8a0e: Status 404 returned error can't find the container with id ba325438e32b4b9d94ebc3b58f83272a8aae9ea767eb57ade68926043ced8a0e Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.343217 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: W1203 12:05:03.355121 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-6143fbcf1089febb2651a32d3ed7242617b745593e10942f12140868d954246d WatchSource:0}: Error finding container 6143fbcf1089febb2651a32d3ed7242617b745593e10942f12140868d954246d: Status 404 returned error can't find the container with id 6143fbcf1089febb2651a32d3ed7242617b745593e10942f12140868d954246d Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.360864 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.366420 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:03 crc kubenswrapper[4591]: W1203 12:05:03.370743 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-91b79ab190194eb1b88e8a8632eeef6f9bdc26040ff6fe616aa8bbb96e526216 WatchSource:0}: Error finding container 91b79ab190194eb1b88e8a8632eeef6f9bdc26040ff6fe616aa8bbb96e526216: Status 404 returned error can't find the container with id 91b79ab190194eb1b88e8a8632eeef6f9bdc26040ff6fe616aa8bbb96e526216 Dec 03 12:05:03 crc kubenswrapper[4591]: W1203 12:05:03.376820 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a1b75f5188f38ea9154bb9bd15048809efd2e7c18dd31f2199700ab3566fb655 WatchSource:0}: Error finding container a1b75f5188f38ea9154bb9bd15048809efd2e7c18dd31f2199700ab3566fb655: Status 404 returned error can't find the container with id a1b75f5188f38ea9154bb9bd15048809efd2e7c18dd31f2199700ab3566fb655 Dec 03 12:05:03 crc kubenswrapper[4591]: E1203 12:05:03.453439 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="800ms" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.626447 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.627677 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.627706 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.627714 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.627732 4591 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:05:03 crc kubenswrapper[4591]: E1203 12:05:03.627950 4591 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.19:6443: connect: connection refused" node="crc" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.848601 4591 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.893117 4591 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22" exitCode=0 Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.893180 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.893250 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"91b79ab190194eb1b88e8a8632eeef6f9bdc26040ff6fe616aa8bbb96e526216"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.893330 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.893936 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.893962 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.893971 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.894582 4591 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="53a74d0eb43ca14a15a3de480703cf9e5c77a34ffa355eac97aa5bcfc2b1b94b" exitCode=0 Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.894643 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"53a74d0eb43ca14a15a3de480703cf9e5c77a34ffa355eac97aa5bcfc2b1b94b"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.894664 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6143fbcf1089febb2651a32d3ed7242617b745593e10942f12140868d954246d"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.894705 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.895395 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.895421 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.895429 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.896338 4591 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f" exitCode=0 Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.896632 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.896689 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4a4fe34fcdcccf249585d84ae29a25174a86a4f32d7047a3adb31cd1e4986046"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.896818 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc 
kubenswrapper[4591]: I1203 12:05:03.897619 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.897647 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.897662 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.899435 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.899464 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ba325438e32b4b9d94ebc3b58f83272a8aae9ea767eb57ade68926043ced8a0e"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.900558 4591 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf" exitCode=0 Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.900586 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.900601 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a1b75f5188f38ea9154bb9bd15048809efd2e7c18dd31f2199700ab3566fb655"} Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.900657 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.901153 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.901169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.901177 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.902328 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.903179 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.903209 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:03 crc kubenswrapper[4591]: I1203 12:05:03.903220 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:04 crc kubenswrapper[4591]: W1203 12:05:04.003074 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list 
*v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:04 crc kubenswrapper[4591]: E1203 12:05:04.003132 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:04 crc kubenswrapper[4591]: W1203 12:05:04.005644 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:04 crc kubenswrapper[4591]: E1203 12:05:04.005689 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:04 crc kubenswrapper[4591]: W1203 12:05:04.057281 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:04 crc kubenswrapper[4591]: E1203 12:05:04.057417 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:04 crc kubenswrapper[4591]: W1203 12:05:04.118030 4591 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.19:6443: connect: connection refused Dec 03 12:05:04 crc kubenswrapper[4591]: E1203 12:05:04.118134 4591 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.19:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:05:04 crc kubenswrapper[4591]: E1203 12:05:04.254573 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="1.6s" Dec 03 12:05:04 crc kubenswrapper[4591]: E1203 12:05:04.402375 4591 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.19:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187db30e43cd65c0 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 12:05:02.847550912 +0000 UTC m=+0.274590681,LastTimestamp:2025-12-03 12:05:02.847550912 +0000 UTC m=+0.274590681,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.428518 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.432877 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.432924 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.432934 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.432962 4591 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:05:04 crc kubenswrapper[4591]: E1203 12:05:04.433406 4591 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.19:6443: connect: connection refused" node="crc" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.905308 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.905360 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.905373 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.905382 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.905392 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.905491 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.906299 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.906329 
4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.906338 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.907734 4591 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733" exitCode=0 Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.907797 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.907887 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.908592 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.908614 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.908622 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.910119 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a0248c27276ad30013892a64d9af62fb7d4831802e820d201fc61d91dce82c2f"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.910181 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.910927 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.910947 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.910954 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.912710 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.912731 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.912744 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 
12:05:04.912807 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.913438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.913457 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.913465 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.914962 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.914985 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.914995 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e"} Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.915043 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.915479 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.915497 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:04 crc kubenswrapper[4591]: I1203 12:05:04.915504 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.918384 4591 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058" exitCode=0 Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.918464 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058"} Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.918478 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.918562 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.919300 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.919328 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:05 
crc kubenswrapper[4591]: I1203 12:05:05.919337 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.919336 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.919433 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:05 crc kubenswrapper[4591]: I1203 12:05:05.919445 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.033482 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.034156 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.034179 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.034187 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.034202 4591 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.924215 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3"} Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.924266 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b"} Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.924277 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b"} Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.924290 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762"} Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.924298 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5"} Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.924445 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.925199 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.925551 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:06 crc kubenswrapper[4591]: I1203 12:05:06.925559 4591 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:07 crc kubenswrapper[4591]: I1203 12:05:07.935600 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:07 crc kubenswrapper[4591]: I1203 12:05:07.935707 4591 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 12:05:07 crc kubenswrapper[4591]: I1203 12:05:07.935738 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:07 crc kubenswrapper[4591]: I1203 12:05:07.936612 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:07 crc kubenswrapper[4591]: I1203 12:05:07.936648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:07 crc kubenswrapper[4591]: I1203 12:05:07.936656 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:08 crc kubenswrapper[4591]: I1203 12:05:08.153479 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:08 crc kubenswrapper[4591]: I1203 12:05:08.153575 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:08 crc kubenswrapper[4591]: I1203 12:05:08.154177 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:08 crc kubenswrapper[4591]: I1203 12:05:08.154200 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:08 crc kubenswrapper[4591]: I1203 12:05:08.154208 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:09 crc kubenswrapper[4591]: I1203 12:05:09.167809 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:09 crc kubenswrapper[4591]: I1203 12:05:09.167919 4591 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 12:05:09 crc kubenswrapper[4591]: I1203 12:05:09.167952 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:09 crc kubenswrapper[4591]: I1203 12:05:09.168863 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:09 crc kubenswrapper[4591]: I1203 12:05:09.168901 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:09 crc kubenswrapper[4591]: I1203 12:05:09.168912 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:10 crc kubenswrapper[4591]: I1203 12:05:10.473977 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:10 crc kubenswrapper[4591]: I1203 12:05:10.474178 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:10 crc kubenswrapper[4591]: I1203 12:05:10.474957 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:10 
crc kubenswrapper[4591]: I1203 12:05:10.474993 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:10 crc kubenswrapper[4591]: I1203 12:05:10.475003 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.171052 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.171273 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.172239 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.172275 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.172284 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.815629 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.815782 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.818468 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.818502 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:11 crc kubenswrapper[4591]: I1203 12:05:11.818512 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:12 crc kubenswrapper[4591]: I1203 12:05:12.064010 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:12 crc kubenswrapper[4591]: I1203 12:05:12.064150 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:12 crc kubenswrapper[4591]: I1203 12:05:12.065051 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:12 crc kubenswrapper[4591]: I1203 12:05:12.065112 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:12 crc kubenswrapper[4591]: I1203 12:05:12.065124 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:12 crc kubenswrapper[4591]: E1203 12:05:12.928250 4591 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.204123 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.204236 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.205057 4591 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.205115 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.205131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.207186 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.208395 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.474412 4591 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.474472 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.936764 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.937821 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.937866 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.937874 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:13 crc kubenswrapper[4591]: I1203 12:05:13.939630 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.686628 4591 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.686703 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.690834 4591 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver 
namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.690888 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.938591 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.939611 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.939639 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.939646 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.983148 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.983257 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.984091 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.984129 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:14 crc kubenswrapper[4591]: I1203 12:05:14.984138 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:15 crc kubenswrapper[4591]: I1203 12:05:15.940666 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:15 crc kubenswrapper[4591]: I1203 12:05:15.941651 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:15 crc kubenswrapper[4591]: I1203 12:05:15.941697 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:15 crc kubenswrapper[4591]: I1203 12:05:15.941711 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.173212 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.173389 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.173804 4591 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 
03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.173898 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.174314 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.174363 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.174373 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.176913 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.659263 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.660878 4591 trace.go:236] Trace[1012307809]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:05:07.302) (total time: 12358ms): Dec 03 12:05:19 crc kubenswrapper[4591]: Trace[1012307809]: ---"Objects listed" error: 12358ms (12:05:19.660) Dec 03 12:05:19 crc kubenswrapper[4591]: Trace[1012307809]: [12.358424164s] [12.358424164s] END Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.660897 4591 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.661748 4591 trace.go:236] Trace[1671276991]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:05:06.589) (total time: 13072ms): Dec 03 12:05:19 crc kubenswrapper[4591]: Trace[1671276991]: ---"Objects listed" error: 13072ms (12:05:19.661) Dec 03 12:05:19 crc kubenswrapper[4591]: Trace[1671276991]: [13.072081672s] [13.072081672s] END Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.661775 4591 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.663418 4591 trace.go:236] Trace[697276675]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:05:06.061) (total time: 13602ms): Dec 03 12:05:19 crc kubenswrapper[4591]: Trace[697276675]: ---"Objects listed" error: 13601ms (12:05:19.662) Dec 03 12:05:19 crc kubenswrapper[4591]: Trace[697276675]: [13.602107156s] [13.602107156s] END Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.663443 4591 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.663748 4591 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.664212 4591 trace.go:236] Trace[4201406]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:05:07.086) (total time: 12577ms): Dec 03 12:05:19 crc 
kubenswrapper[4591]: Trace[4201406]: ---"Objects listed" error: 12577ms (12:05:19.664) Dec 03 12:05:19 crc kubenswrapper[4591]: Trace[4201406]: [12.577560184s] [12.577560184s] END Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.664231 4591 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.665156 4591 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.849108 4591 apiserver.go:52] "Watching apiserver" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.851762 4591 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.852035 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.852459 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.852607 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.852727 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.852851 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.852978 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.852504 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.853124 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.853214 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.853306 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855249 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855314 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855310 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855469 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855642 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855795 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855884 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855886 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.855983 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.877768 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.890005 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.898716 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.906578 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.914767 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.921656 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.931325 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.951892 4591 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.962527 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965126 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965165 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965190 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965205 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965221 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965238 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965252 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965271 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965290 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965309 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965330 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965347 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965363 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965378 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965395 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965413 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965433 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" 
(UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965454 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965480 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965498 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965493 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965517 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965534 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965551 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965569 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965586 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965606 4591 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965623 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965639 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965656 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965674 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965672 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965691 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965773 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965802 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965824 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965842 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965862 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965884 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965903 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965920 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965936 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965952 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965968 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965987 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966004 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966020 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966037 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966053 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966087 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966104 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966120 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966136 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966152 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966172 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966192 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966213 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966230 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966249 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966264 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966283 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966300 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966319 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966338 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966357 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966376 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966394 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966411 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966427 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966445 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966461 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966478 4591 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966499 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966526 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966542 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966559 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966575 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966592 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966610 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966627 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966646 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 12:05:19 crc kubenswrapper[4591]: 
I1203 12:05:19.966661 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966679 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966695 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966713 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966729 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966762 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966779 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966797 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966816 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966832 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966847 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966866 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966883 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966898 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965812 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966915 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965947 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966935 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965955 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.965968 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966022 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966056 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966148 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966163 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966206 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966232 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966251 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966317 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966468 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966492 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966572 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966693 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966762 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966795 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966896 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967115 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967163 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967252 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967346 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967350 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967379 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.966954 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967435 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967446 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967457 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967499 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967520 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967534 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967549 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967581 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967599 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967614 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967619 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967645 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967706 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967715 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967702 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967766 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967768 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967794 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967813 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967818 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967818 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967878 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967905 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967926 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967948 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967967 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967985 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968004 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967922 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967949 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.967979 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968045 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968014 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968111 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968171 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968202 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968342 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968402 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968435 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968520 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968628 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968663 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968758 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968814 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968869 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969020 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969042 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.968027 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969194 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969215 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969232 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969247 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969268 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969285 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969300 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969314 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969330 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969347 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969364 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969380 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969395 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969412 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969431 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969445 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969461 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969479 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969493 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969509 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969554 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969574 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969596 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969615 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969636 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969654 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969672 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969690 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969707 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969725 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969752 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969770 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969786 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969806 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969824 4591 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969841 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969858 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969874 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969893 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969908 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969925 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969941 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969959 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969976 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.969991 4591 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970236 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970265 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970263 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970283 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970288 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970301 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970418 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970945 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970977 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.970999 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971019 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971037 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971042 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971115 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971139 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971160 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971181 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971203 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971227 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971367 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971247 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971438 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971460 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971481 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971502 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971523 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971545 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971550 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971564 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971578 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971604 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971631 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971654 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971688 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971711 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971746 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971768 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.971806 4591 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:20.471773121 +0000 UTC m=+17.898812881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971868 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971910 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971951 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971956 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.971979 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972085 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972126 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972153 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972182 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972185 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972254 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972231 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972326 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972356 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972382 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972363 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972407 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972410 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972531 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972536 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972557 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972586 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972611 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972577 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972637 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972722 4591 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972734 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972758 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972769 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972781 4591 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972791 4591 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972801 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972810 4591 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972820 4591 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972830 4591 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972840 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972848 4591 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972859 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972883 4591 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972893 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972890 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972902 4591 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972914 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972926 4591 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972936 4591 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972968 4591 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972978 4591 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972989 4591 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972998 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 
crc kubenswrapper[4591]: I1203 12:05:19.973011 4591 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973021 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973031 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973040 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973049 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973058 4591 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973085 4591 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973094 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973104 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973114 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973123 4591 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973132 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973140 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 
12:05:19.973149 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973158 4591 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973167 4591 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973176 4591 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973188 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973201 4591 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973211 4591 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973220 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973230 4591 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973240 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973252 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973261 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973270 4591 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973281 4591 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973289 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973298 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973308 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973317 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973329 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973339 4591 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973348 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973358 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973367 4591 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973376 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973385 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973396 4591 reconciler_common.go:293] "Volume detached for 
volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973405 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973414 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973422 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973431 4591 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973442 4591 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973451 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973460 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972969 4591 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.972898 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973013 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973301 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973438 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973461 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.973638 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.974009 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.974036 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.974704 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.974878 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.975520 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.975583 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.975617 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.975643 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.975679 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.975895 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.976142 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.976995 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.976999 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.977057 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.977110 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.976800 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.977481 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.977527 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.977720 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.977924 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.978127 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.978296 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.978686 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.978848 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.978858 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979043 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979071 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979087 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979111 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979114 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979342 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979373 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979383 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979372 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979507 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979634 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979778 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979864 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.979957 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980106 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980452 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980503 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980680 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980690 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980682 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980895 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.980979 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981044 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981110 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981239 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981240 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981385 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981651 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981664 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.981680 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.982176 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.982245 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.982301 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.982373 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.982548 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.982945 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983113 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983133 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983190 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.983270 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.993085 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:20.493021732 +0000 UTC m=+17.920061502 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.989155 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983380 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983532 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983497 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983601 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983787 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.983778 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.984244 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.984487 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.984666 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.988298 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.989377 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.989661 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.990888 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.991143 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.991445 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.991546 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:19 crc kubenswrapper[4591]: E1203 12:05:19.993312 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:20.493295035 +0000 UTC m=+17.920334806 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.991929 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.992309 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.997404 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:19 crc kubenswrapper[4591]: I1203 12:05:19.997551 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.010626 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.010899 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.010956 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.010992 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.011013 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.011029 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.011106 4591 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:20.511089398 +0000 UTC m=+17.938129168 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.011198 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.011217 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.011229 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.011262 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:20.511255009 +0000 UTC m=+17.938294778 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.013394 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.013491 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.013561 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.013702 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.013914 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.014471 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.014601 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.018288 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.018744 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.018801 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.022320 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.022344 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.022462 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.022482 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.022858 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.023478 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.023572 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.023601 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.023638 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.023783 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.023998 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.024143 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.024991 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.025182 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.025559 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.026597 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.026809 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.027435 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.027618 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.027622 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.027837 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.027923 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.029288 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.029544 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.030219 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.030867 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.040697 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.041354 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.048767 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.050661 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.055862 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.056573 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.058905 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.068567 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074630 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074701 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074775 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074786 4591 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074794 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074844 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074933 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.074940 4591 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075012 4591 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075029 4591 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075047 4591 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075081 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075097 4591 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075112 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075127 4591 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075142 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075158 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075174 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075188 4591 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075203 4591 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075216 4591 reconciler_common.go:293] "Volume detached for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075232 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075247 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075263 4591 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075277 4591 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075291 4591 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075304 4591 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075319 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075335 4591 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075347 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075361 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075374 4591 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075391 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075406 4591 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075423 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075437 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075451 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075467 4591 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075481 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075494 4591 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075510 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075523 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075539 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075551 4591 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075565 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075579 4591 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075592 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" 
(UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075602 4591 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075614 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075624 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075637 4591 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075647 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075658 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075668 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075679 4591 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075688 4591 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075699 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075710 4591 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075723 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075743 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: 
\"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075754 4591 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075766 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075780 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075791 4591 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075801 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075811 4591 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075823 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075832 4591 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075844 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075854 4591 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075865 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075877 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075888 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075897 4591 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075908 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075918 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075930 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075942 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075951 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075962 4591 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075972 4591 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075982 4591 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.075995 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076005 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076018 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076028 4591 
reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076039 4591 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076050 4591 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076060 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076084 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076098 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076109 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076119 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076129 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076139 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076149 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076159 4591 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076169 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076179 4591 reconciler_common.go:293] "Volume detached for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076189 4591 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076199 4591 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076209 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076218 4591 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076228 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076239 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076248 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076260 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076270 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076280 4591 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076290 4591 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076301 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076311 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076443 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076633 4591 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076646 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076659 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076710 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076723 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076734 4591 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076755 4591 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076777 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076792 4591 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076804 4591 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076815 4591 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076981 4591 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.077008 4591 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.077037 4591 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.077051 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.077089 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.076940 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.089234 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",
\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.098425 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.168604 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.173605 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.179527 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:05:20 crc kubenswrapper[4591]: W1203 12:05:20.181216 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-1023c5da14124971a441e999dd566053e56281b439cec37d02ca55c8709b403b WatchSource:0}: Error finding container 1023c5da14124971a441e999dd566053e56281b439cec37d02ca55c8709b403b: Status 404 returned error can't find the container with id 1023c5da14124971a441e999dd566053e56281b439cec37d02ca55c8709b403b Dec 03 12:05:20 crc kubenswrapper[4591]: W1203 12:05:20.187965 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-3a891a0f23843b0693c03e87d46a937988354e269a2c0f6167e08a3cfde8c3cf WatchSource:0}: Error finding container 3a891a0f23843b0693c03e87d46a937988354e269a2c0f6167e08a3cfde8c3cf: Status 404 returned error can't find the container with id 3a891a0f23843b0693c03e87d46a937988354e269a2c0f6167e08a3cfde8c3cf Dec 03 12:05:20 crc kubenswrapper[4591]: W1203 12:05:20.193897 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-98f1366b21352c97c99f508ec130fa119fd75bf2873d914d166aefa4c8adfef5 WatchSource:0}: Error finding container 98f1366b21352c97c99f508ec130fa119fd75bf2873d914d166aefa4c8adfef5: Status 404 returned error can't find the container with id 98f1366b21352c97c99f508ec130fa119fd75bf2873d914d166aefa4c8adfef5 Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.478547 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.481482 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.481639 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:21.481621717 +0000 UTC m=+18.908661487 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.482123 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.485878 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.488199 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.495912 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.505235 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.513801 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":
\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.522484 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.529937 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.536668 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.543892 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.552744 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.562358 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.572605 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.582227 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.582362 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.582417 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.582444 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582471 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.582482 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582524 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:21.582509824 +0000 UTC m=+19.009549594 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582580 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582603 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582620 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582649 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:21.582634669 +0000 UTC m=+19.009674439 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582648 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582693 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582706 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582778 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:21.582746509 +0000 UTC m=+19.009786279 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582648 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: E1203 12:05:20.582935 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:21.582894015 +0000 UTC m=+19.009933786 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.593211 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.601698 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.611532 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"r
unning\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.913323 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.913968 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.914606 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.915187 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.915705 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.916177 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.916697 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.917242 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.917815 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.919761 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.920252 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.921185 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.921643 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.922126 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.922935 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.923416 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.924347 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.924744 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" 
path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.925252 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.926196 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.926608 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.927517 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.927913 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.928822 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.929273 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.929842 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.933466 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.934024 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.934592 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.935501 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.935973 4591 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.936096 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.938488 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.939128 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.939506 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.941384 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.941994 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.942867 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.943433 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.944377 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.944836 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.945385 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.946390 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.947268 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.947703 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.948569 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.949042 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.950158 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.950588 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.951580 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.952248 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.953008 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.955818 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.956973 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.957907 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719"} Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.957950 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be"} Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.957964 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1023c5da14124971a441e999dd566053e56281b439cec37d02ca55c8709b403b"} Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.957975 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"98f1366b21352c97c99f508ec130fa119fd75bf2873d914d166aefa4c8adfef5"} Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.957986 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc"} Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.958003 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"3a891a0f23843b0693c03e87d46a937988354e269a2c0f6167e08a3cfde8c3cf"} Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.968465 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.977302 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.986669 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:20 crc kubenswrapper[4591]: I1203 12:05:20.995980 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.005801 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.015934 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.041288 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.051757 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.062109 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.071798 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.081104 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.090322 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.100082 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.110439 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.121265 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.130208 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.489814 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.489993 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:23.489959638 +0000 UTC m=+20.916999408 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.591228 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.591277 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.591303 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.591330 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591467 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591467 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591468 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591522 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591539 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:21 crc 
kubenswrapper[4591]: E1203 12:05:21.591566 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:23.591546949 +0000 UTC m=+21.018586719 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591485 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591585 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591635 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:23.591578138 +0000 UTC m=+21.018617907 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591646 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591661 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:23.591654571 +0000 UTC m=+21.018694341 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.591673 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:23.591668116 +0000 UTC m=+21.018707887 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.890215 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.890300 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:21 crc kubenswrapper[4591]: I1203 12:05:21.890300 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.890420 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.890464 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:21 crc kubenswrapper[4591]: E1203 12:05:21.890512 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.866183 4591 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.867902 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.867936 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.867946 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.867990 4591 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.872811 4591 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.873191 4591 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.874423 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.874460 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.874471 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.874490 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.874504 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:22Z","lastTransitionTime":"2025-12-03T12:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:22 crc kubenswrapper[4591]: E1203 12:05:22.888865 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 
2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.892801 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.892837 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.892849 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.892861 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.892873 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:22Z","lastTransitionTime":"2025-12-03T12:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:22 crc kubenswrapper[4591]: E1203 12:05:22.902284 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 
2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.903299 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.904741 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.904779 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.904790 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.904807 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.904818 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:22Z","lastTransitionTime":"2025-12-03T12:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.912015 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: E1203 12:05:22.914035 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"a
d5d5022-d9e3-4192-a6ae-548c1b27699e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.916857 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.916886 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.916895 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.916919 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.916930 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:22Z","lastTransitionTime":"2025-12-03T12:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.937570 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: E1203 12:05:22.951152 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 
2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.951648 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.959126 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.959186 
4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.959206 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.959227 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.959237 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:22Z","lastTransitionTime":"2025-12-03T12:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.962670 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04"} Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.970926 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: E1203 12:05:22.971371 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: E1203 12:05:22.971486 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.972608 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.972682 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.972750 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.972812 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.972885 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:22Z","lastTransitionTime":"2025-12-03T12:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.980234 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:22 crc kubenswrapper[4591]: I1203 12:05:22.990561 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.000259 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.010120 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.018772 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.027056 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.037339 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.046901 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.056809 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.068575 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.075124 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.075179 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.075191 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.075225 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.075238 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.079840 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.177948 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.177987 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.177996 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.178011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.178021 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.280437 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.280470 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.280480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.280496 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.280510 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.383016 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.383052 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.383076 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.383092 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.383101 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.485128 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.485157 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.485166 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.485177 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.485185 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.506621 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.506777 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:27.506755345 +0000 UTC m=+24.933795115 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.586988 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.587019 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.587029 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.587039 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.587048 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.607580 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.607611 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.607634 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.607652 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607730 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607749 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607763 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607761 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607778 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607805 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:27.607792171 +0000 UTC m=+25.034831941 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607730 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607825 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:27.607814964 +0000 UTC m=+25.034854735 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607825 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607837 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:27.607832107 +0000 UTC m=+25.034871877 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607838 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.607870 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:27.607859148 +0000 UTC m=+25.034898918 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.689308 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.689344 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.689356 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.689372 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.689383 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.792042 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.792114 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.792125 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.792147 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.792161 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.890042 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.890151 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.890052 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.890170 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.890304 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:23 crc kubenswrapper[4591]: E1203 12:05:23.890471 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.893933 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.893967 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.893978 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.893990 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.893999 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.996707 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.996755 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.996766 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.996782 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:23 crc kubenswrapper[4591]: I1203 12:05:23.996796 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:23Z","lastTransitionTime":"2025-12-03T12:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.098792 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.098844 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.098856 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.098876 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.098894 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.200753 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.200796 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.200805 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.200821 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.200835 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.303706 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.303787 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.303809 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.303834 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.303847 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.324883 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-bshxj"] Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.325427 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-rwr66"] Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.325649 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.326213 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.328827 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.328900 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.329163 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.329287 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.329696 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.331199 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.331429 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.341708 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.351598 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.359909 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.368754 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.377419 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.386539 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.394594 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.404674 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.406044 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.406103 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.406115 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.406133 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.406144 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.411894 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.415386 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2d41611d-4a75-440e-9c67-4222a9b4fd34-hosts-file\") pod \"node-resolver-rwr66\" (UID: \"2d41611d-4a75-440e-9c67-4222a9b4fd34\") " pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.415524 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtjfn\" (UniqueName: 
\"kubernetes.io/projected/2d41611d-4a75-440e-9c67-4222a9b4fd34-kube-api-access-dtjfn\") pod \"node-resolver-rwr66\" (UID: \"2d41611d-4a75-440e-9c67-4222a9b4fd34\") " pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.415579 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-serviceca\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.415632 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-host\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.415652 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75bgw\" (UniqueName: \"kubernetes.io/projected/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-kube-api-access-75bgw\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.422430 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.431281 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.438884 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.449574 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.460290 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.471402 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.492560 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.508882 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.508939 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.508956 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.508981 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.508996 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.516205 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2d41611d-4a75-440e-9c67-4222a9b4fd34-hosts-file\") pod \"node-resolver-rwr66\" (UID: \"2d41611d-4a75-440e-9c67-4222a9b4fd34\") " pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.516306 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtjfn\" (UniqueName: \"kubernetes.io/projected/2d41611d-4a75-440e-9c67-4222a9b4fd34-kube-api-access-dtjfn\") pod \"node-resolver-rwr66\" (UID: \"2d41611d-4a75-440e-9c67-4222a9b4fd34\") " pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.516351 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2d41611d-4a75-440e-9c67-4222a9b4fd34-hosts-file\") pod \"node-resolver-rwr66\" (UID: \"2d41611d-4a75-440e-9c67-4222a9b4fd34\") " pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.516453 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-serviceca\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.516563 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-host\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.516628 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75bgw\" (UniqueName: \"kubernetes.io/projected/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-kube-api-access-75bgw\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.516702 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-host\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.517356 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-serviceca\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.521751 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.537055 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75bgw\" (UniqueName: \"kubernetes.io/projected/fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f-kube-api-access-75bgw\") pod \"node-ca-bshxj\" (UID: \"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\") " pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.537493 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtjfn\" (UniqueName: \"kubernetes.io/projected/2d41611d-4a75-440e-9c67-4222a9b4fd34-kube-api-access-dtjfn\") pod \"node-resolver-rwr66\" (UID: \"2d41611d-4a75-440e-9c67-4222a9b4fd34\") " pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.547214 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.558468 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.611327 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.611366 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.611376 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.611391 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.611401 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.637864 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-bshxj" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.642970 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-rwr66" Dec 03 12:05:24 crc kubenswrapper[4591]: W1203 12:05:24.653994 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d41611d_4a75_440e_9c67_4222a9b4fd34.slice/crio-baab9ec9de112c7677a27330015f32fe0d4af68ea68639a47075cd903ce62b55 WatchSource:0}: Error finding container baab9ec9de112c7677a27330015f32fe0d4af68ea68639a47075cd903ce62b55: Status 404 returned error can't find the container with id baab9ec9de112c7677a27330015f32fe0d4af68ea68639a47075cd903ce62b55 Dec 03 12:05:24 crc kubenswrapper[4591]: W1203 12:05:24.655078 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfad8ba4a_fd4a_40c7_b5ca_94b9c286a48f.slice/crio-892fbca5cf021d4a3a331365476132a8fa0b79935b5ecf70a43318993482fa10 WatchSource:0}: Error finding container 892fbca5cf021d4a3a331365476132a8fa0b79935b5ecf70a43318993482fa10: Status 404 returned error can't find the container with id 892fbca5cf021d4a3a331365476132a8fa0b79935b5ecf70a43318993482fa10 Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.715047 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.715103 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.715114 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.715132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.715148 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.817588 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.817633 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.817645 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.817665 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.817675 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.919983 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.920017 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.920026 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.920043 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.920053 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:24Z","lastTransitionTime":"2025-12-03T12:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.969159 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rwr66" event={"ID":"2d41611d-4a75-440e-9c67-4222a9b4fd34","Type":"ContainerStarted","Data":"7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.969221 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rwr66" event={"ID":"2d41611d-4a75-440e-9c67-4222a9b4fd34","Type":"ContainerStarted","Data":"baab9ec9de112c7677a27330015f32fe0d4af68ea68639a47075cd903ce62b55"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.971171 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-bshxj" event={"ID":"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f","Type":"ContainerStarted","Data":"ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.971217 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-bshxj" event={"ID":"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f","Type":"ContainerStarted","Data":"892fbca5cf021d4a3a331365476132a8fa0b79935b5ecf70a43318993482fa10"} Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.980662 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:24 crc kubenswrapper[4591]: I1203 12:05:24.992622 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.003232 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.004651 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.014530 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.020020 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.021903 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.021945 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.021955 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.021973 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.021987 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.043677 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.067147 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-mnzzd"] Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.067518 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.068564 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-2qprr"] Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.068828 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.070325 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-5pvst"] Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.070904 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-k4dxv"] Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.071091 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.071360 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.071549 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.071975 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.071982 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.072038 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.072111 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.072130 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.072323 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.072540 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.073527 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.075716 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.075722 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.075759 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.075834 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.076123 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.076132 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.076456 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.077944 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.080903 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.081130 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.090268 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.110987 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.119401 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121407 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180
a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"se
tup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121531 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-netns\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121562 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-var-lib-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121585 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-config\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121601 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121617 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-etc-kubernetes\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121638 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-netd\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121655 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-system-cni-dir\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121671 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d56ae362-837a-44a3-8ec3-d082ac52977b-cni-binary-copy\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121687 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-k8s-cni-cncf-io\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121704 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-conf-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121719 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-socket-dir-parent\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121746 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-hostroot\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121762 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-kubelet\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121780 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-env-overrides\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121797 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovn-node-metrics-cert\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 
12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121814 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-os-release\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121830 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-kubelet\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121844 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121861 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw8cm\" (UniqueName: \"kubernetes.io/projected/362420fc-42a2-444d-b450-49ff1c0eb5c2-kube-api-access-kw8cm\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121877 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/96827b8d-1081-4acd-a2db-c2fa3a87b42a-proxy-tls\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121892 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-etc-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121906 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-ovn\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121920 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-ovn-kubernetes\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121952 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121981 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-cni-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.121999 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-cni-multus\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122030 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/96827b8d-1081-4acd-a2db-c2fa3a87b42a-mcd-auth-proxy-config\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122047 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d56ae362-837a-44a3-8ec3-d082ac52977b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122081 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-systemd-units\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122099 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-node-log\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122113 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-daemon-config\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122126 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-multus-certs\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122156 4591 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-bin\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122181 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-cni-bin\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122202 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/96827b8d-1081-4acd-a2db-c2fa3a87b42a-rootfs\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122234 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qrqj\" (UniqueName: \"kubernetes.io/projected/96827b8d-1081-4acd-a2db-c2fa3a87b42a-kube-api-access-5qrqj\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122252 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-cnibin\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122268 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-systemd\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122286 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-script-lib\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122300 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-netns\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122314 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlrpg\" (UniqueName: \"kubernetes.io/projected/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-kube-api-access-nlrpg\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " 
pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122354 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-os-release\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122372 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkg7m\" (UniqueName: \"kubernetes.io/projected/d56ae362-837a-44a3-8ec3-d082ac52977b-kube-api-access-fkg7m\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122388 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-log-socket\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122409 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-system-cni-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122424 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-cnibin\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122442 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-cni-binary-copy\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.122496 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-slash\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.123712 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.123744 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.123755 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.123768 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 
12:05:25.123777 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.138356 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.150237 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.163219 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.179456 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.194654 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T
12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\
",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.203987 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.214660 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.222874 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-slash\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.222907 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-netns\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.222937 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-slash\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.222936 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-var-lib-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223012 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-config\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223013 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-netns\") pod \"ovnkube-node-k4dxv\" (UID: 
\"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223081 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-var-lib-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223034 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223088 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223124 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-etc-kubernetes\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223144 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-netd\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223162 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-system-cni-dir\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223182 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d56ae362-837a-44a3-8ec3-d082ac52977b-cni-binary-copy\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223200 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-k8s-cni-cncf-io\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223216 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-conf-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " 
pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223236 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-socket-dir-parent\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223252 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-hostroot\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223266 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-kubelet\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223286 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-kubelet\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223301 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-env-overrides\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223315 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovn-node-metrics-cert\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223330 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-os-release\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223343 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223366 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw8cm\" (UniqueName: \"kubernetes.io/projected/362420fc-42a2-444d-b450-49ff1c0eb5c2-kube-api-access-kw8cm\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223381 4591 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223401 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/96827b8d-1081-4acd-a2db-c2fa3a87b42a-proxy-tls\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223416 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-etc-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223430 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-ovn\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223445 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-ovn-kubernetes\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223459 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-cni-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223473 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-cni-multus\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223487 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-multus-certs\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223513 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/96827b8d-1081-4acd-a2db-c2fa3a87b42a-mcd-auth-proxy-config\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223528 4591 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d56ae362-837a-44a3-8ec3-d082ac52977b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223544 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-systemd-units\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223566 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-node-log\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223580 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-daemon-config\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223594 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-cni-bin\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223623 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-bin\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223638 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/96827b8d-1081-4acd-a2db-c2fa3a87b42a-rootfs\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223653 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-netns\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223674 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qrqj\" (UniqueName: \"kubernetes.io/projected/96827b8d-1081-4acd-a2db-c2fa3a87b42a-kube-api-access-5qrqj\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223689 4591 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-cnibin\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223706 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-systemd\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223720 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-script-lib\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223721 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-config\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223742 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlrpg\" (UniqueName: \"kubernetes.io/projected/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-kube-api-access-nlrpg\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223758 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-cnibin\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223782 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-os-release\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223798 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-socket-dir-parent\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223799 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkg7m\" (UniqueName: \"kubernetes.io/projected/d56ae362-837a-44a3-8ec3-d082ac52977b-kube-api-access-fkg7m\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223832 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-log-socket\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223848 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-system-cni-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223864 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-cni-binary-copy\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223889 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-hostroot\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223932 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-kubelet\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.223957 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-kubelet\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224010 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-etc-kubernetes\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224036 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-netd\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224056 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-system-cni-dir\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224097 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224457 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-env-overrides\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224658 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d56ae362-837a-44a3-8ec3-d082ac52977b-cni-binary-copy\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224699 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-k8s-cni-cncf-io\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224721 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-conf-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224758 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-log-socket\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.224859 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-system-cni-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.225336 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-cni-binary-copy\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.225542 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-os-release\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.225633 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-cnibin\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.225719 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-netns\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.225952 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/96827b8d-1081-4acd-a2db-c2fa3a87b42a-rootfs\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.225980 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-cni-bin\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.225988 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-systemd\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226054 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-daemon-config\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226262 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-etc-openvswitch\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226285 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-ovn\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226301 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-ovn-kubernetes\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226340 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-multus-cni-dir\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226355 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-var-lib-cni-multus\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 
12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226368 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-host-run-multus-certs\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226657 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-script-lib\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226712 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-bin\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226860 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/96827b8d-1081-4acd-a2db-c2fa3a87b42a-mcd-auth-proxy-config\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226907 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-os-release\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226885 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-cnibin\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226926 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-systemd-units\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.226935 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-node-log\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227266 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d56ae362-837a-44a3-8ec3-d082ac52977b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227414 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227483 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d56ae362-837a-44a3-8ec3-d082ac52977b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227488 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227534 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227557 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227567 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.227807 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\
":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.228815 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovn-node-metrics-cert\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.229048 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/96827b8d-1081-4acd-a2db-c2fa3a87b42a-proxy-tls\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.238163 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkg7m\" (UniqueName: \"kubernetes.io/projected/d56ae362-837a-44a3-8ec3-d082ac52977b-kube-api-access-fkg7m\") pod \"multus-additional-cni-plugins-5pvst\" (UID: \"d56ae362-837a-44a3-8ec3-d082ac52977b\") " pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.239125 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw8cm\" (UniqueName: \"kubernetes.io/projected/362420fc-42a2-444d-b450-49ff1c0eb5c2-kube-api-access-kw8cm\") pod \"ovnkube-node-k4dxv\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.240789 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.242049 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlrpg\" (UniqueName: \"kubernetes.io/projected/19d5b224-0f8a-49a3-84f4-f2c0ef74fda4-kube-api-access-nlrpg\") pod \"multus-2qprr\" (UID: \"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\") " pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.244298 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qrqj\" (UniqueName: \"kubernetes.io/projected/96827b8d-1081-4acd-a2db-c2fa3a87b42a-kube-api-access-5qrqj\") pod \"machine-config-daemon-mnzzd\" (UID: \"96827b8d-1081-4acd-a2db-c2fa3a87b42a\") " pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.250241 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.261266 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.269690 4591 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.279089 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.287643 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.295387 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.303522 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.313418 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.330226 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.330276 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.330285 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: 
I1203 12:05:25.330301 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.330312 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.378720 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.386060 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-2qprr" Dec 03 12:05:25 crc kubenswrapper[4591]: W1203 12:05:25.392238 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96827b8d_1081_4acd_a2db_c2fa3a87b42a.slice/crio-5a70be81fed6ff1b6defdf5d116ee836f334852dbc9de9a735e342e3ff9847a2 WatchSource:0}: Error finding container 5a70be81fed6ff1b6defdf5d116ee836f334852dbc9de9a735e342e3ff9847a2: Status 404 returned error can't find the container with id 5a70be81fed6ff1b6defdf5d116ee836f334852dbc9de9a735e342e3ff9847a2 Dec 03 12:05:25 crc kubenswrapper[4591]: W1203 12:05:25.396033 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19d5b224_0f8a_49a3_84f4_f2c0ef74fda4.slice/crio-0f640d951cf890d84f64e97b4fdd02dbfc664e660e1a6661d1358b64f72da5a4 WatchSource:0}: Error finding container 0f640d951cf890d84f64e97b4fdd02dbfc664e660e1a6661d1358b64f72da5a4: Status 404 returned error can't find the container with id 0f640d951cf890d84f64e97b4fdd02dbfc664e660e1a6661d1358b64f72da5a4 Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.399003 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-5pvst" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.403044 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:25 crc kubenswrapper[4591]: W1203 12:05:25.408611 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd56ae362_837a_44a3_8ec3_d082ac52977b.slice/crio-20eebbe01f1495030e09a7b36bafee11e4e79a2e30091780868049a2f1b26d25 WatchSource:0}: Error finding container 20eebbe01f1495030e09a7b36bafee11e4e79a2e30091780868049a2f1b26d25: Status 404 returned error can't find the container with id 20eebbe01f1495030e09a7b36bafee11e4e79a2e30091780868049a2f1b26d25 Dec 03 12:05:25 crc kubenswrapper[4591]: W1203 12:05:25.426606 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod362420fc_42a2_444d_b450_49ff1c0eb5c2.slice/crio-41db4860240a93056fae4c94ea19e239c01c3c8dccb5ca63d44263f59525aa08 WatchSource:0}: Error finding container 41db4860240a93056fae4c94ea19e239c01c3c8dccb5ca63d44263f59525aa08: Status 404 returned error can't find the container with id 41db4860240a93056fae4c94ea19e239c01c3c8dccb5ca63d44263f59525aa08 Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.433581 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.433611 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.433620 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.433636 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.433646 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.535751 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.535791 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.535800 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.535816 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.535828 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.639051 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.639532 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.639572 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.639599 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.639611 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.742237 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.742264 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.742272 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.742284 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.742292 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.843765 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.843797 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.843808 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.843829 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.843837 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.890106 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.890137 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.890161 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:25 crc kubenswrapper[4591]: E1203 12:05:25.890213 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:25 crc kubenswrapper[4591]: E1203 12:05:25.890288 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:25 crc kubenswrapper[4591]: E1203 12:05:25.890354 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.945884 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.945910 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.945918 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.945930 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.945939 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:25Z","lastTransitionTime":"2025-12-03T12:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.975908 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f" exitCode=0 Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.975977 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.976003 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"41db4860240a93056fae4c94ea19e239c01c3c8dccb5ca63d44263f59525aa08"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.979186 4591 generic.go:334] "Generic (PLEG): container finished" podID="d56ae362-837a-44a3-8ec3-d082ac52977b" containerID="0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57" exitCode=0 Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.979278 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerDied","Data":"0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.979311 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerStarted","Data":"20eebbe01f1495030e09a7b36bafee11e4e79a2e30091780868049a2f1b26d25"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.981681 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerStarted","Data":"dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.981712 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerStarted","Data":"0f640d951cf890d84f64e97b4fdd02dbfc664e660e1a6661d1358b64f72da5a4"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.983580 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.983625 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.983637 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"5a70be81fed6ff1b6defdf5d116ee836f334852dbc9de9a735e342e3ff9847a2"} Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.985831 4591 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:25 crc kubenswrapper[4591]: I1203 12:05:25.992769 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.000515 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.010202 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.019338 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.027571 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.040271 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.048619 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.049046 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.049089 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.049099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.049112 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.049121 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.062104 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.072439 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.080821 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.088929 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.096394 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.105385 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.114755 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.124508 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.139964 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z 
is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.152593 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.152625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.152635 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.152650 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.152658 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.156513 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.164383 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.172602 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.182964 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.191841 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.199967 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.210279 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.218463 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.227884 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.236552 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.244756 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.254422 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.256258 4591 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.256311 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.256328 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.256356 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.256370 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.265253 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.367517 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.367805 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.367881 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.367944 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.367997 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.469863 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.469895 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.469904 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.469917 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.469926 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.571375 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.571404 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.571411 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.571425 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.571434 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.673345 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.673639 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.673650 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.673664 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.673673 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.775541 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.775579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.775587 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.775601 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.775611 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.878458 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.878501 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.878510 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.878526 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.878535 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.980850 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.980888 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.980896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.980911 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.980920 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:26Z","lastTransitionTime":"2025-12-03T12:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.989202 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.989272 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.989283 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.989292 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.989299 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.989345 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d"} Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.990503 4591 generic.go:334] "Generic (PLEG): container finished" podID="d56ae362-837a-44a3-8ec3-d082ac52977b" containerID="4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4" exitCode=0 Dec 03 12:05:26 crc kubenswrapper[4591]: I1203 12:05:26.990531 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerDied","Data":"4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.000602 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:26Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.011879 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.022045 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.032382 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.042663 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.050812 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.058172 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.064386 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.071543 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.079056 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.082606 4591 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.082647 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.082656 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.082669 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.082677 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.088515 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.098193 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.111793 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z 
is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.125877 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.140142 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:27Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.184660 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.184688 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.184698 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.184711 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.184720 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.287091 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.287119 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.287127 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.287139 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.287149 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.389738 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.389777 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.389788 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.389805 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.389817 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.491856 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.491896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.491908 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.491922 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.491933 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.547851 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.548025 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:35.548002573 +0000 UTC m=+32.975042343 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.594131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.594160 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.594170 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.594185 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.594193 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.648968 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.649008 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.649036 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.649056 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649125 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649163 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649203 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649231 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649244 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649208 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:35.649191095 +0000 UTC m=+33.076230865 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649302 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:35.649289 +0000 UTC m=+33.076328769 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649316 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:35.649310009 +0000 UTC m=+33.076349779 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649328 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649381 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649402 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.649517 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:35.649490327 +0000 UTC m=+33.076530097 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.696336 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.696370 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.696382 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.696395 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.696404 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.798880 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.798935 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.798947 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.798968 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.798981 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.890161 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.890189 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.890293 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.890390 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.890566 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:27 crc kubenswrapper[4591]: E1203 12:05:27.890650 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.900933 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.900968 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.900978 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.900991 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.901003 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:27Z","lastTransitionTime":"2025-12-03T12:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.996928 4591 generic.go:334] "Generic (PLEG): container finished" podID="d56ae362-837a-44a3-8ec3-d082ac52977b" containerID="eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f" exitCode=0 Dec 03 12:05:27 crc kubenswrapper[4591]: I1203 12:05:27.996979 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerDied","Data":"eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.003055 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.003134 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.003154 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.003176 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.003189 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.009986 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.022187 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.033830 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.042452 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.050820 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.059424 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.067701 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.080161 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.093496 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",
\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o
://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.101931 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.105322 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.105358 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.105368 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.105381 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.105390 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.109895 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.119042 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.127445 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.136315 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.148871 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:28Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.207104 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.207135 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.207146 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.207163 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.207177 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.309878 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.309917 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.309929 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.309948 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.309959 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.411930 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.411967 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.411978 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.411993 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.412007 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.513858 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.513893 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.513904 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.513920 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.513934 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.615570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.615598 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.615610 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.615620 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.615629 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.717327 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.717354 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.717367 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.717378 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.717386 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.819293 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.819331 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.819341 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.819363 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.819377 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.921438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.921499 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.921510 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.921530 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:28 crc kubenswrapper[4591]: I1203 12:05:28.921541 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:28Z","lastTransitionTime":"2025-12-03T12:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.003274 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.005203 4591 generic.go:334] "Generic (PLEG): container finished" podID="d56ae362-837a-44a3-8ec3-d082ac52977b" containerID="dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962" exitCode=0 Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.005246 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerDied","Data":"dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.023390 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.023426 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.023437 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.023451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.023459 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.028586 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440
589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.040438 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.056272 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.067368 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.077693 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.087551 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.097642 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.110178 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.119874 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.124862 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.124891 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.124901 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.124917 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc 
kubenswrapper[4591]: I1203 12:05:29.124930 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.129848 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.137711 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.146626 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.155123 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.165928 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.174700 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:29Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.227357 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.227394 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.227405 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.227426 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.227439 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.329562 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.329598 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.329609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.329625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.329635 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.432168 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.432356 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.432366 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.432379 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.432386 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.534318 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.534344 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.534352 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.534369 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.534380 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.637193 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.637214 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.637224 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.637235 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.637245 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.739437 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.739469 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.739477 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.739492 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.739501 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.841228 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.841254 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.841264 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.841276 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.841285 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.890310 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.890310 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.890383 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:29 crc kubenswrapper[4591]: E1203 12:05:29.890512 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:29 crc kubenswrapper[4591]: E1203 12:05:29.890613 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:29 crc kubenswrapper[4591]: E1203 12:05:29.890682 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.943684 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.943804 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.943876 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.943939 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:29 crc kubenswrapper[4591]: I1203 12:05:29.944000 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:29Z","lastTransitionTime":"2025-12-03T12:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.011299 4591 generic.go:334] "Generic (PLEG): container finished" podID="d56ae362-837a-44a3-8ec3-d082ac52977b" containerID="b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5" exitCode=0 Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.011369 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerDied","Data":"b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.022408 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.033642 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.043957 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.045495 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.045522 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.045531 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.045544 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.045556 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.055752 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.065915 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.076038 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.085280 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.099464 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.113617 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",
\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o
://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.122462 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.133838 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.145425 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.147952 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.147993 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.148005 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.148020 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.148030 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.155021 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.164176 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.174429 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:30Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.250145 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.250181 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.250189 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.250205 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.250215 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.352578 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.352613 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.352623 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.352636 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.352645 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.454425 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.454463 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.454471 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.454490 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.454502 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.556695 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.556745 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.556754 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.556770 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.556780 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.659094 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.659133 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.659143 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.659161 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.659172 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.761052 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.761098 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.761107 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.761120 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.761146 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.863331 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.863501 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.863512 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.863528 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.863539 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.965448 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.965483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.965494 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.965513 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:30 crc kubenswrapper[4591]: I1203 12:05:30.965527 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:30Z","lastTransitionTime":"2025-12-03T12:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.020210 4591 generic.go:334] "Generic (PLEG): container finished" podID="d56ae362-837a-44a3-8ec3-d082ac52977b" containerID="cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500" exitCode=0 Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.020290 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerDied","Data":"cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.026763 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.027150 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.027178 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.031598 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.041263 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.054955 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.055329 4591 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.057176 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.066967 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.067250 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.067299 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.067312 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.067326 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.067338 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.079584 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.089814 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.100035 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.114704 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.124922 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.139145 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"fi
nishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.151425 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.161240 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.171482 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.171527 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.171539 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.171562 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.171576 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.171906 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.184208 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\"
:\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.196079 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.206211 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.214673 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.222251 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.234082 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.242818 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.253052 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.266253 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ca8df092040cfd5252c9e25d02241d01d33844c
b722f5f132de8ffca14414ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.274582 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.274630 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.274648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.274674 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.274687 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.275695 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.289553 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.300512 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.310447 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.319828 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.328971 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.340216 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.350420 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.376964 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.376997 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.377370 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.377409 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.377428 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.479887 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.479918 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.479929 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.479943 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.479957 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.582531 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.582565 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.582575 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.582591 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.582601 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.684883 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.684919 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.684930 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.684944 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.684953 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.787078 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.787116 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.787128 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.787143 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.787153 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889208 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889258 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889306 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889326 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889346 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889439 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:31 crc kubenswrapper[4591]: E1203 12:05:31.889529 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889571 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.889581 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:31 crc kubenswrapper[4591]: E1203 12:05:31.889610 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:31 crc kubenswrapper[4591]: E1203 12:05:31.889685 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.991449 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.991474 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.991484 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.991501 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:31 crc kubenswrapper[4591]: I1203 12:05:31.991512 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:31Z","lastTransitionTime":"2025-12-03T12:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.034393 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" event={"ID":"d56ae362-837a-44a3-8ec3-d082ac52977b","Type":"ContainerStarted","Data":"e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.034501 4591 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.045085 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.058053 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b
43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.067409 4591 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.080838 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.091813 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c42
5251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.093161 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.093209 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.093221 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.093241 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.093254 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.102324 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.111648 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.122197 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.131339 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.138622 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.146056 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.153891 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.163804 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.176398 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.183988 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.195415 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.195466 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.195476 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.195487 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.195496 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.297660 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.297798 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.297865 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.297920 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.297982 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.399789 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.399839 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.399850 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.399872 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.399885 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.502651 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.507136 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.507275 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.507731 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.508493 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.611186 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.611236 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.611250 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.611270 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.611284 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.714540 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.714868 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.714880 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.714897 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.714908 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.817530 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.817589 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.817606 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.817631 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.817649 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.902566 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e
911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.916559 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"nam
e\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.920042 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.920090 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.920101 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.920115 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.920125 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:32Z","lastTransitionTime":"2025-12-03T12:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.930377 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.942183 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.950231 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.957866 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.968465 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:32 crc kubenswrapper[4591]: I1203 12:05:32.986748 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.005368 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.014848 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.022198 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.022227 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.022236 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.022249 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.022259 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.025220 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.036307 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.039312 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/0.log" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.042216 4591 generic.go:334] "Generic (PLEG): container finished" 
podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce" exitCode=1 Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.042264 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.042826 4591 scope.go:117] "RemoveContainer" containerID="4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.048232 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha25
6:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.060334 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.070055 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.083034 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ca8df092040cfd5252c9e25d02241d01d33844c
b722f5f132de8ffca14414ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"message\\\":\\\"\\\\nI1203 12:05:32.916890 5877 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 12:05:32.916927 5877 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 12:05:32.916946 5877 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 12:05:32.916963 5877 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 12:05:32.917026 5877 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 12:05:32.917043 5877 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 12:05:32.917144 5877 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 12:05:32.917171 5877 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 12:05:32.917172 5877 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 12:05:32.917210 5877 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 12:05:32.917275 5877 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 12:05:32.917646 5877 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 12:05:32.917676 5877 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 12:05:32.917702 5877 factory.go:656] Stopping watch factory\\\\nI1203 12:05:32.917736 5877 ovnkube.go:599] Stopped ovnkube\\\\nI1203 12:05:32.917782 5877 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.091830 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.105227 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b5
4b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\
\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.113808 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.125117 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.125234 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.125418 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.125516 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.125385 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.125696 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.134880 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.144427 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.154951 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.165433 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.181199 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.190258 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.198352 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.206575 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.215748 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.224402 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.227914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.227953 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.227964 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.227981 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.227993 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.301339 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.301390 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.301404 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.301428 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.301442 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.319565 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.329598 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.329644 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.329654 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.329674 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.329686 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.340661 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.343835 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.343869 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.343880 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.343906 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.343920 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.357923 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.364691 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.364746 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.364762 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.364785 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.364796 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.376288 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.381797 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.381834 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.381845 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.381863 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.381874 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.399749 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.399884 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.401313 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.401346 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.401356 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.401371 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.401381 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.503296 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.503341 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.503351 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.503366 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.503379 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.605414 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.605471 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.605485 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.605509 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.605526 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.707482 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.707516 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.707526 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.707541 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.707552 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.809658 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.809725 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.809736 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.809754 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.809766 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.890224 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.890352 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.890478 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.890521 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.890700 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:33 crc kubenswrapper[4591]: E1203 12:05:33.890869 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.911677 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.911716 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.911737 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.911755 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.911769 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:33Z","lastTransitionTime":"2025-12-03T12:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:33 crc kubenswrapper[4591]: I1203 12:05:33.944533 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.014163 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.014226 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.014248 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.014275 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.014290 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.048119 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/1.log" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.048727 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/0.log" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.051946 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3" exitCode=1 Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.051989 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.052050 4591 scope.go:117] "RemoveContainer" containerID="4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.052655 4591 scope.go:117] "RemoveContainer" containerID="5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3" Dec 03 12:05:34 crc kubenswrapper[4591]: E1203 12:05:34.052826 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.067158 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.076611 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.089484 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.098920 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037
064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.108707 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.115952 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.116004 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.116017 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.116040 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.116053 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.117913 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.125544 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.133428 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.142312 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.149583 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.159337 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.168644 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.178041 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.188169 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.203052 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4ca8df092040cfd5252c9e25d02241d01d33844cb722f5f132de8ffca14414ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"message\\\":\\\"\\\\nI1203 12:05:32.916890 5877 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 12:05:32.916927 5877 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 12:05:32.916946 5877 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 12:05:32.916963 5877 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 12:05:32.917026 5877 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 12:05:32.917043 5877 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 12:05:32.917144 5877 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 12:05:32.917171 5877 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 12:05:32.917172 5877 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 12:05:32.917210 5877 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 12:05:32.917275 5877 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 12:05:32.917646 5877 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 12:05:32.917676 5877 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 12:05:32.917702 5877 factory.go:656] Stopping watch factory\\\\nI1203 12:05:32.917736 5877 ovnkube.go:599] Stopped ovnkube\\\\nI1203 12:05:32.917782 5877 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, 
Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef
0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.219138 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.219197 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.219210 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.219238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.219258 4591 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.321691 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.321734 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.321747 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.321769 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.321781 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.424563 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.424611 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.424622 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.424642 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.424666 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.529166 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.529217 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.529232 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.529251 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.529272 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.631706 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.631763 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.631776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.631798 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.631811 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.734099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.734139 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.734149 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.734168 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.734180 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.836454 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.836486 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.836497 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.836511 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.836518 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.938236 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.938262 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.938272 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.938285 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:34 crc kubenswrapper[4591]: I1203 12:05:34.938293 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:34Z","lastTransitionTime":"2025-12-03T12:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.040520 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.040549 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.040559 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.040570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.040578 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.057391 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/1.log" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.061989 4591 scope.go:117] "RemoveContainer" containerID="5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3" Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.062187 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.073569 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.082792 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.094791 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.104879 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.118669 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.127155 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc 
kubenswrapper[4591]: I1203 12:05:35.136178 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.142431 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.142467 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.142480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.142501 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.142513 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.146410 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\
\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.155414 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.165433 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.173256 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.183807 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.198835 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.212985 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b2
29f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.222434 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:35Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.245148 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.245188 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.245200 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.245222 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.245235 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.347471 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.347521 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.347533 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.347551 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.347565 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.450137 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.450175 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.450188 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.450208 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.450221 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.552195 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.552237 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.552249 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.552267 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.552283 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.616937 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.617146 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:51.617112404 +0000 UTC m=+49.044152174 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.654597 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.654737 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.654780 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.654851 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.654875 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.717313 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.717356 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.717385 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.717409 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717493 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717548 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717554 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717583 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717610 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717566 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717628 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717638 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717571 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:51.71755284 +0000 UTC m=+49.144592611 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717695 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:51.717673226 +0000 UTC m=+49.144712987 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717710 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:51.717704816 +0000 UTC m=+49.144744587 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.717734 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:05:51.717726998 +0000 UTC m=+49.144766758 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.757408 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.757441 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.757451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.757469 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.757485 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.860107 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.860156 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.860169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.860188 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.860205 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.889867 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.889947 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.889878 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.890045 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.890210 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:35 crc kubenswrapper[4591]: E1203 12:05:35.890349 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.962452 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.962488 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.962501 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.962515 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:35 crc kubenswrapper[4591]: I1203 12:05:35.962525 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:35Z","lastTransitionTime":"2025-12-03T12:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.064484 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.064521 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.064531 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.064547 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.064558 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.166438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.166485 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.166498 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.166521 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.166536 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.269628 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.269961 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.269971 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.269983 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.269993 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.371709 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.371750 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.371763 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.371779 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.371795 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.474155 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.474189 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.474200 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.474219 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.474228 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.576060 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.576107 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.576121 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.576135 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.576143 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.678618 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.678656 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.678667 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.678682 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.678693 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.780547 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.780584 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.780594 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.780610 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.780622 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.881894 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.881925 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.881937 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.881953 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.881962 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.984099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.984131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.984141 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.984151 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:36 crc kubenswrapper[4591]: I1203 12:05:36.984159 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:36Z","lastTransitionTime":"2025-12-03T12:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.086295 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.086355 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.086371 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.086419 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.086438 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.188259 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.188294 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.188304 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.188313 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.188323 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.290368 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.290423 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.290438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.290463 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.290477 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.392592 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.392654 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.392664 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.392689 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.392703 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.494743 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.494792 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.494808 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.494824 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.494837 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.596931 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.596970 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.596981 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.596995 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.597008 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.620947 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7"] Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.621430 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.624525 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.625256 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.641152 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.652045 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.662534 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.674831 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.685265 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/et
c/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.696385 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.699257 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.699298 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.699311 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.699332 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.699342 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.707149 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.723111 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.733340 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.736265 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4da01412-45d4-4dcf-805d-f75dfcfae828-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.736315 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4da01412-45d4-4dcf-805d-f75dfcfae828-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.736382 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4da01412-45d4-4dcf-805d-f75dfcfae828-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.736433 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpgw8\" (UniqueName: \"kubernetes.io/projected/4da01412-45d4-4dcf-805d-f75dfcfae828-kube-api-access-fpgw8\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.742734 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.752795 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.761956 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.769526 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.777441 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.787575 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.801653 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.801691 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.801704 4591 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.801726 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.801738 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.803748 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d
924a480db264110901c99ce3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:37Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.837180 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4da01412-45d4-4dcf-805d-f75dfcfae828-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.837223 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4da01412-45d4-4dcf-805d-f75dfcfae828-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.837271 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4da01412-45d4-4dcf-805d-f75dfcfae828-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.837304 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpgw8\" (UniqueName: \"kubernetes.io/projected/4da01412-45d4-4dcf-805d-f75dfcfae828-kube-api-access-fpgw8\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.838012 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4da01412-45d4-4dcf-805d-f75dfcfae828-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") 
" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.838280 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4da01412-45d4-4dcf-805d-f75dfcfae828-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.842785 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4da01412-45d4-4dcf-805d-f75dfcfae828-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.850583 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpgw8\" (UniqueName: \"kubernetes.io/projected/4da01412-45d4-4dcf-805d-f75dfcfae828-kube-api-access-fpgw8\") pod \"ovnkube-control-plane-749d76644c-vgvc7\" (UID: \"4da01412-45d4-4dcf-805d-f75dfcfae828\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.889815 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.889862 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.889902 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:37 crc kubenswrapper[4591]: E1203 12:05:37.889922 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:37 crc kubenswrapper[4591]: E1203 12:05:37.889991 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:37 crc kubenswrapper[4591]: E1203 12:05:37.890122 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.904905 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.904947 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.904958 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.904987 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.905001 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:37Z","lastTransitionTime":"2025-12-03T12:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:37 crc kubenswrapper[4591]: I1203 12:05:37.933924 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.007702 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.007755 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.007766 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.007784 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.007796 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.072897 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" event={"ID":"4da01412-45d4-4dcf-805d-f75dfcfae828","Type":"ContainerStarted","Data":"313fdb0887580cac461c6b122116778228be53ae6103b2e9e3a678851d227ef0"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.109914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.109959 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.109973 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.109994 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.110008 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.212518 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.212569 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.212583 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.212603 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.212617 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.314895 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.314935 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.314944 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.314961 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.314972 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.343937 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-5drvq"] Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.344662 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:38 crc kubenswrapper[4591]: E1203 12:05:38.344763 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.362907 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883
dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.374561 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.385560 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.397517 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.408116 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.417795 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.417920 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.417957 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.417973 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.417993 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.418005 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.429468 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.437597 4591 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.443842 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6bgr\" (UniqueName: \"kubernetes.io/projected/8cd44649-dee5-4a99-8123-059f30fd0c1b-kube-api-access-k6bgr\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " 
pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.443893 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.447297 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.456532 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.464185 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.471860 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.480148 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.491839 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.501583 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.512984 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.520522 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.520557 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.520570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.520606 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.520617 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.532161 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d
924a480db264110901c99ce3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.544876 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6bgr\" (UniqueName: \"kubernetes.io/projected/8cd44649-dee5-4a99-8123-059f30fd0c1b-kube-api-access-k6bgr\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.544920 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:38 crc kubenswrapper[4591]: E1203 12:05:38.545077 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:38 crc kubenswrapper[4591]: E1203 12:05:38.545135 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:39.045119874 +0000 UTC m=+36.472159644 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.558923 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6bgr\" (UniqueName: \"kubernetes.io/projected/8cd44649-dee5-4a99-8123-059f30fd0c1b-kube-api-access-k6bgr\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.628294 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.628361 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.628373 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.628396 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.628408 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.730235 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.730266 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.730275 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.730290 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.730301 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.832238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.832277 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.832288 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.832302 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.832312 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.934099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.934399 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.934409 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.934421 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:38 crc kubenswrapper[4591]: I1203 12:05:38.934434 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:38Z","lastTransitionTime":"2025-12-03T12:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.036635 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.036763 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.036833 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.036899 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.036963 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.050473 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:39 crc kubenswrapper[4591]: E1203 12:05:39.050591 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:39 crc kubenswrapper[4591]: E1203 12:05:39.050671 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:40.050649215 +0000 UTC m=+37.477688995 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.077565 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" event={"ID":"4da01412-45d4-4dcf-805d-f75dfcfae828","Type":"ContainerStarted","Data":"f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.077605 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" event={"ID":"4da01412-45d4-4dcf-805d-f75dfcfae828","Type":"ContainerStarted","Data":"128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.089420 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.100102 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.110787 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.120997 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.130736 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.139938 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.139969 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.140227 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.140238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.140280 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.140292 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.150377 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.160337 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 
12:05:39.174552 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325745
3265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.189243 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b2
29f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.198704 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.210428 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.218888 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.236444 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.243111 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.243156 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.243169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.243194 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.243207 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.246795 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.256839 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.265823 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.345180 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.345216 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.345226 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.345242 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.345255 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.446923 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.447035 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.447126 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.447193 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.447265 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.549169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.549219 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.549231 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.549247 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.549261 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.651005 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.651043 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.651051 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.651087 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.651099 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.753403 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.753475 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.753487 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.753510 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.753526 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.855675 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.855703 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.855735 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.855748 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.855758 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.889897 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.889961 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.889969 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.889901 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:39 crc kubenswrapper[4591]: E1203 12:05:39.890022 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:39 crc kubenswrapper[4591]: E1203 12:05:39.890130 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:39 crc kubenswrapper[4591]: E1203 12:05:39.890252 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:39 crc kubenswrapper[4591]: E1203 12:05:39.890372 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.958352 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.958423 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.958445 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.958471 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:39 crc kubenswrapper[4591]: I1203 12:05:39.958499 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:39Z","lastTransitionTime":"2025-12-03T12:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.060533 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:40 crc kubenswrapper[4591]: E1203 12:05:40.061145 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:40 crc kubenswrapper[4591]: E1203 12:05:40.061244 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:42.061222265 +0000 UTC m=+39.488262035 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.067735 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.067792 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.067805 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.067828 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.067841 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.170401 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.170461 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.170473 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.170496 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.170509 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.272705 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.272769 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.272780 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.272804 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.272821 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.374677 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.374719 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.374730 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.374745 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.374759 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.477477 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.477521 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.477530 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.477548 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.477562 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.579586 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.579636 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.579648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.579668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.579680 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.682175 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.682219 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.682232 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.682256 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.682269 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.785209 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.785253 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.785262 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.785273 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.785281 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.886835 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.886876 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.886885 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.886899 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.886911 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.988875 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.988906 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.988916 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.988927 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:40 crc kubenswrapper[4591]: I1203 12:05:40.988936 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:40Z","lastTransitionTime":"2025-12-03T12:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.090164 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.090196 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.090207 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.090221 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.090231 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.191692 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.191772 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.191794 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.191820 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.191834 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.293785 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.293824 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.293836 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.293851 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.293862 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.396447 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.396481 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.396496 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.396511 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.396520 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.498747 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.498803 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.498820 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.498843 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.498860 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.601055 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.601112 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.601122 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.601139 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.601151 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.703371 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.703398 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.703407 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.703417 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.703428 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.805603 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.805634 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.805642 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.805651 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.805659 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.890268 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.890294 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.890331 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:41 crc kubenswrapper[4591]: E1203 12:05:41.890397 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.890432 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:41 crc kubenswrapper[4591]: E1203 12:05:41.890506 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:41 crc kubenswrapper[4591]: E1203 12:05:41.890588 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:41 crc kubenswrapper[4591]: E1203 12:05:41.890668 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.907004 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.907092 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.907106 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.907127 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:41 crc kubenswrapper[4591]: I1203 12:05:41.907139 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:41Z","lastTransitionTime":"2025-12-03T12:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.009475 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.009505 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.009514 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.009524 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.009530 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.081427 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:42 crc kubenswrapper[4591]: E1203 12:05:42.081540 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:42 crc kubenswrapper[4591]: E1203 12:05:42.081618 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:46.08158843 +0000 UTC m=+43.508628201 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.111670 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.111698 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.111714 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.111725 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.111733 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.213382 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.213406 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.213415 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.213425 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.213432 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.314738 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.314763 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.314773 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.314783 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.314791 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.416840 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.416866 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.416874 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.416885 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.416891 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.518914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.518936 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.518945 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.518957 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.518964 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.621355 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.621389 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.621400 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.621411 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.621419 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.723528 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.723564 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.723609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.723626 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.723636 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.824715 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.824740 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.824748 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.824758 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.824765 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.900572 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.917091 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d
924a480db264110901c99ce3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.926396 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.926432 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.926444 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.926456 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.926466 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:42Z","lastTransitionTime":"2025-12-03T12:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.934020 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.946905 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.956786 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.967977 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.978501 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.987131 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:42 crc kubenswrapper[4591]: I1203 12:05:42.997624 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.006855 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.020095 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.028376 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.028451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.028462 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.028488 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.028503 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.028727 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.036095 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.048232 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.057109 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.067723 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.080621 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.130351 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.130504 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.130562 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.130632 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.130692 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.232254 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.232298 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.232311 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.232328 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.232340 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.334300 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.334363 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.334378 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.334410 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.334430 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.428814 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.428852 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.428865 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.428899 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.428911 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.439184 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.441946 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.441982 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.441992 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.442007 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.442020 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.450613 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.452961 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.452994 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.453006 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.453019 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.453029 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.461473 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.463671 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.463702 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.463722 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.463734 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.463743 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.471525 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.474609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.474643 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.474657 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.474670 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.474679 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.483658 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.483778 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.484962 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.484995 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.485005 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.485016 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.485027 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.586946 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.586983 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.586993 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.587005 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.587012 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.689480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.689511 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.689541 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.689555 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.689566 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.791799 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.791834 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.791844 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.791856 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.791867 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.890595 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.890651 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.890692 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.890603 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.890736 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.890859 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.890947 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:43 crc kubenswrapper[4591]: E1203 12:05:43.891039 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.894193 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.894250 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.894262 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.894284 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.894312 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.996493 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.996525 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.996535 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.996547 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:43 crc kubenswrapper[4591]: I1203 12:05:43.996556 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:43Z","lastTransitionTime":"2025-12-03T12:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.098727 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.098776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.098787 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.098805 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.098820 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.201489 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.201521 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.201532 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.201548 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.201558 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.303406 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.303430 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.303438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.303449 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.303457 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.405011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.405036 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.405045 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.405056 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.405085 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.507088 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.507122 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.507134 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.507149 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.507159 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.609292 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.609346 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.609374 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.609386 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.609398 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.711568 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.711618 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.711632 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.711646 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.711664 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.813762 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.813798 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.813807 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.813820 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.813830 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.917401 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.917433 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.917442 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.917453 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:44 crc kubenswrapper[4591]: I1203 12:05:44.917462 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:44Z","lastTransitionTime":"2025-12-03T12:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.019836 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.019856 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.019864 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.019876 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.019886 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.121607 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.121641 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.121648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.121662 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.121672 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.223497 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.223533 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.223544 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.223557 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.223567 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.326011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.326109 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.326122 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.326153 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.326167 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.429168 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.429218 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.429228 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.429250 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.429262 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.531038 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.531100 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.531113 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.531138 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.531152 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.633578 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.633621 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.633629 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.633646 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.633656 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.735428 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.735463 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.735472 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.735485 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.735494 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.837981 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.838020 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.838032 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.838050 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.838060 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.889830 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.889873 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.889873 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.889936 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:45 crc kubenswrapper[4591]: E1203 12:05:45.890102 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:45 crc kubenswrapper[4591]: E1203 12:05:45.890175 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:45 crc kubenswrapper[4591]: E1203 12:05:45.890311 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:45 crc kubenswrapper[4591]: E1203 12:05:45.890430 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.940324 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.940362 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.940374 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.940388 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:45 crc kubenswrapper[4591]: I1203 12:05:45.940398 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:45Z","lastTransitionTime":"2025-12-03T12:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.041922 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.041963 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.041974 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.041995 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.042006 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.120281 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:46 crc kubenswrapper[4591]: E1203 12:05:46.120438 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:46 crc kubenswrapper[4591]: E1203 12:05:46.120497 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:05:54.120481677 +0000 UTC m=+51.547521447 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.144079 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.144124 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.144136 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.144153 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.144165 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.245841 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.245872 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.245882 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.245897 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.245908 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.347243 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.347280 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.347292 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.347305 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.347316 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.449096 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.449132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.449142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.449160 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.449168 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.550861 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.550894 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.550902 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.550918 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.550930 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.652585 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.652628 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.652638 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.652659 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.652675 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.754220 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.754256 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.754265 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.754275 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.754309 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.856740 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.856779 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.856790 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.856805 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.856815 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.959033 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.959098 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.959110 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.959124 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:46 crc kubenswrapper[4591]: I1203 12:05:46.959136 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:46Z","lastTransitionTime":"2025-12-03T12:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.061200 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.061241 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.061250 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.061264 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.061278 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.163399 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.163435 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.163444 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.163457 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.163466 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.265552 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.265578 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.265587 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.265600 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.265609 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.368168 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.368203 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.368214 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.368225 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.368233 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.470238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.470272 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.470282 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.470296 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.470305 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.572760 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.572815 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.572826 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.572845 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.572857 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.674526 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.674572 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.674589 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.674618 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.674632 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.776920 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.776957 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.776969 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.776983 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.776994 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.878375 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.878399 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.878408 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.878419 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.878428 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.889851 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.889960 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.889964 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:47 crc kubenswrapper[4591]: E1203 12:05:47.890099 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.890169 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:47 crc kubenswrapper[4591]: E1203 12:05:47.890227 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:47 crc kubenswrapper[4591]: E1203 12:05:47.890404 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:47 crc kubenswrapper[4591]: E1203 12:05:47.890447 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.979867 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.979931 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.979942 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.979963 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:47 crc kubenswrapper[4591]: I1203 12:05:47.979978 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:47Z","lastTransitionTime":"2025-12-03T12:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.082027 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.082093 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.082106 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.082119 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.082130 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.184009 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.184051 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.184060 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.184094 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.184107 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.286345 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.286379 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.286387 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.286401 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.286410 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.388134 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.388170 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.388178 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.388191 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.388201 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.490581 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.490606 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.490614 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.490625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.490634 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.592855 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.592913 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.592922 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.592951 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.592969 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.695287 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.695321 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.695330 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.695340 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.695348 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.797521 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.797553 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.797566 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.797577 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.797585 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.899285 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.899332 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.899345 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.899362 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:48 crc kubenswrapper[4591]: I1203 12:05:48.899375 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:48Z","lastTransitionTime":"2025-12-03T12:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.001528 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.001563 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.001574 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.001587 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.001598 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.103741 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.103768 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.103779 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.103793 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.103805 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.206148 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.206169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.206178 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.206189 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.206197 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.307795 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.307850 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.307861 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.307879 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.307890 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.410297 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.410334 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.410342 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.410355 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.410364 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.512434 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.512469 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.512478 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.512494 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.512503 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.614199 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.614230 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.614237 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.614249 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.614258 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.716683 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.716733 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.716743 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.716757 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.716769 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.818170 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.818205 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.818215 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.818229 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.818238 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.890395 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.890406 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.890433 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:49 crc kubenswrapper[4591]: E1203 12:05:49.890601 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.890630 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:49 crc kubenswrapper[4591]: E1203 12:05:49.890810 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:49 crc kubenswrapper[4591]: E1203 12:05:49.890886 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:49 crc kubenswrapper[4591]: E1203 12:05:49.890934 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.891380 4591 scope.go:117] "RemoveContainer" containerID="5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.920271 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.920332 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.920345 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.920368 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:49 crc kubenswrapper[4591]: I1203 12:05:49.920380 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:49Z","lastTransitionTime":"2025-12-03T12:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.022764 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.022972 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.022984 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.023012 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.023027 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.110183 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/1.log" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.112963 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.113552 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.124489 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.125420 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.125450 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.125460 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.125475 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.125486 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.136328 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.171474 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.187842 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.205941 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.215795 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.227014 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.228029 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.228082 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.228094 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.228111 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.228122 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.237327 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.252840 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.263356 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.274441 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.289723 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.304099 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.315474 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.329355 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify 
certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.330762 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.330824 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.330836 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.330857 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.330887 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.347497 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.358903 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.433828 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.433883 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.433896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.433919 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.433947 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.536148 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.536189 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.536200 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.536218 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.536229 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.638621 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.638666 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.638675 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.638690 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.638712 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.742263 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.742404 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.742483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.742577 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.742642 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.845214 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.845275 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.845289 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.845598 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.845637 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.947487 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.947527 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.947537 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.947553 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:50 crc kubenswrapper[4591]: I1203 12:05:50.947571 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:50Z","lastTransitionTime":"2025-12-03T12:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.049877 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.049917 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.049927 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.049944 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.049956 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.117167 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/2.log" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.117643 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/1.log" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.119885 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72" exitCode=1 Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.119928 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.119966 4591 scope.go:117] "RemoveContainer" containerID="5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.125143 4591 scope.go:117] "RemoveContainer" containerID="c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72" Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.125444 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.137806 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.152427 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.152458 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.152466 4591 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.152502 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.152514 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.153855 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b
0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d45e61c0e0990fe3b91d7d635ed38528c012e0d924a480db264110901c99ce3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:33Z\\\",\\\"message\\\":\\\"false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1203 12:05:33.739451 6008 services_controller.go:452] Built service openshift-image-registry/image-registry per-node LB for network=default: []services.LB{}\\\\nI1203 12:05:33.739459 6008 services_controller.go:453] Built service openshift-image-registry/image-registry template LB for network=default: []services.LB{}\\\\nF1203 12:05:33.739123 6008 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificat\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: 
Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\
":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.171749 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.182191 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.191338 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.202949 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.214529 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.224371 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.234630 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.245914 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.254746 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.254798 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 
12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.254814 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.254845 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.254859 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.255257 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
3T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.264375 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},
{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.272934 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\
\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.282181 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.290920 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.298415 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.306362 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:51Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.357133 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.357159 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.357169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.357182 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.357194 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.459226 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.459266 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.459277 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.459297 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.459311 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.561282 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.561328 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.561341 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.561363 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.561378 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.663131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.663165 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.663174 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.663188 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.663200 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.675711 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.675905 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:06:23.675881997 +0000 UTC m=+81.102921767 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.765364 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.765409 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.765420 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.765438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.765450 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.776962 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.776994 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.777017 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.777042 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777083 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777121 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777139 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:06:23.777123594 +0000 UTC m=+81.204163363 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777140 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777163 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:06:23.777154412 +0000 UTC m=+81.204194181 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777169 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777186 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777215 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:06:23.777207821 +0000 UTC m=+81.204247591 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777310 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777351 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777372 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.777465 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:06:23.777436423 +0000 UTC m=+81.204476222 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.866763 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.866808 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.866817 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.866836 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.866847 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.889515 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.889535 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.889527 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.889518 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.889649 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.889759 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.889853 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:51 crc kubenswrapper[4591]: E1203 12:05:51.889972 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.969167 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.969211 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.969221 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.969236 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:51 crc kubenswrapper[4591]: I1203 12:05:51.969247 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:51Z","lastTransitionTime":"2025-12-03T12:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.071011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.071037 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.071046 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.071056 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.071084 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.123953 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/2.log" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.130199 4591 scope.go:117] "RemoveContainer" containerID="c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72" Dec 03 12:05:52 crc kubenswrapper[4591]: E1203 12:05:52.130367 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.141915 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\
"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.150148 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb05
1d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.158802 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.168094 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.173431 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.173483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.173493 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.173505 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.173514 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.176232 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.183769 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.192852 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.201446 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.214320 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.229874 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b2
29f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.238776 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.248654 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.255397 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.264694 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.274403 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.275946 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.275997 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.276012 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.276034 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.276050 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.283565 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.291648 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.378577 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.378623 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.378633 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.378648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.378659 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.480388 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.480417 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.480427 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.480440 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.480467 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.582224 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.582256 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.582264 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.582275 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.582285 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.684227 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.684251 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.684260 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.684270 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.684277 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.785637 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.785674 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.785687 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.785716 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.785726 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.887468 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.887491 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.887498 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.887509 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.887517 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.899889 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.912194 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.925971 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b2
29f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.935604 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.946381 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.955082 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.964115 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.972896 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.980781 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.989165 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.989209 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.989221 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.989239 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.989249 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:52Z","lastTransitionTime":"2025-12-03T12:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.990746 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:52 crc kubenswrapper[4591]: I1203 12:05:52.998381 4591 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T12:05:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.006924 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.017555 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.024486 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.031124 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.040831 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.053174 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.091952 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.092002 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.092012 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.092027 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.092037 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.193633 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.193668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.193679 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.193694 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.193719 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.295427 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.295538 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.295602 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.295664 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.295746 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.398087 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.398129 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.398142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.398165 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.398174 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.500106 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.500141 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.500150 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.500163 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.500174 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.574092 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.574276 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.574384 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.574508 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.574604 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.583432 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.586241 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.586308 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.586321 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.586347 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.586361 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.595739 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.599368 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.599403 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.599416 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.599429 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.599441 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.607846 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.610607 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.610648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.610668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.610683 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.610694 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.619166 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.622536 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.622622 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.622682 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.622756 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.622817 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.631104 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.631338 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.632395 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.632419 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.632431 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.632443 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.632453 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.734505 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.734535 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.734544 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.734557 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.734570 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.837094 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.837132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.837142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.837157 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.837167 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.890039 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.890043 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.890152 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.890421 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.890538 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.890650 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.890878 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:53 crc kubenswrapper[4591]: E1203 12:05:53.891003 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.938985 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.939096 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.939162 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.939232 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:53 crc kubenswrapper[4591]: I1203 12:05:53.939288 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:53Z","lastTransitionTime":"2025-12-03T12:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.041572 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.041599 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.041611 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.041625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.041634 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.147370 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.147400 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.147439 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.147474 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.147487 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.200320 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:54 crc kubenswrapper[4591]: E1203 12:05:54.200465 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:54 crc kubenswrapper[4591]: E1203 12:05:54.200531 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:06:10.200511315 +0000 UTC m=+67.627551105 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.249480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.249507 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.249517 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.249531 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.249541 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.352475 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.352520 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.352535 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.352557 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.352571 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.454457 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.454498 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.454511 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.454526 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.454538 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.556825 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.556869 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.556880 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.556898 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.556910 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.659202 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.659244 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.659256 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.659268 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.659278 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.761125 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.761165 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.761178 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.761197 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.761212 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.863632 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.863668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.863679 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.863692 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.863712 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.982379 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.982428 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.982441 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.982458 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:54 crc kubenswrapper[4591]: I1203 12:05:54.982472 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:54Z","lastTransitionTime":"2025-12-03T12:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.084649 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.084789 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.084845 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.084914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.084977 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.187004 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.187123 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.187191 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.187244 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.187299 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.288625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.288796 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.288856 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.288908 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.288959 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.390978 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.391009 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.391020 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.391034 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.391044 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.493489 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.493520 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.493529 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.493545 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.493555 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.595796 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.595842 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.595852 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.595865 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.595877 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.697830 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.697853 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.697861 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.697887 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.697895 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.799808 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.799829 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.799856 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.799866 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.799877 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.890101 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.890211 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.890383 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:55 crc kubenswrapper[4591]: E1203 12:05:55.890374 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.890418 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:55 crc kubenswrapper[4591]: E1203 12:05:55.890497 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:55 crc kubenswrapper[4591]: E1203 12:05:55.890597 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:55 crc kubenswrapper[4591]: E1203 12:05:55.890895 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.901730 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.901764 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.901775 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.901789 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:55 crc kubenswrapper[4591]: I1203 12:05:55.901798 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:55Z","lastTransitionTime":"2025-12-03T12:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.004132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.004163 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.004174 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.004185 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.004196 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.106848 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.106884 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.106894 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.106912 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.106922 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.209357 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.209625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.209718 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.209817 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.209897 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.312508 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.312807 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.312880 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.312964 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.313023 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.415372 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.415429 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.415442 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.415462 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.415480 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.517854 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.517901 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.517912 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.517933 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.517949 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.619628 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.619721 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.619735 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.619754 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.619775 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.721520 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.721569 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.721580 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.721597 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.721611 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.823393 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.823493 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.823506 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.823520 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.823532 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.926228 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.926278 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.926287 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.926303 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:56 crc kubenswrapper[4591]: I1203 12:05:56.926314 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:56Z","lastTransitionTime":"2025-12-03T12:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.028425 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.028468 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.028480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.028494 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.028506 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.130668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.130699 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.130716 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.130729 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.130739 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.232872 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.232911 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.232920 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.232934 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.232946 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.334959 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.334990 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.334999 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.335008 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.335016 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.436832 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.436876 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.436885 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.436897 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.436907 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.538290 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.538320 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.538329 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.538341 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.538353 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.640487 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.640516 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.640525 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.640535 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.640543 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.742579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.742614 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.742625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.742637 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.742645 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.844955 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.844982 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.844991 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.845007 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.845016 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.890411 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.890503 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:57 crc kubenswrapper[4591]: E1203 12:05:57.890521 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:57 crc kubenswrapper[4591]: E1203 12:05:57.890648 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.890704 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:57 crc kubenswrapper[4591]: E1203 12:05:57.890777 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.890828 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:57 crc kubenswrapper[4591]: E1203 12:05:57.890876 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.946726 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.946759 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.946768 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.946781 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:57 crc kubenswrapper[4591]: I1203 12:05:57.946793 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:57Z","lastTransitionTime":"2025-12-03T12:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.048578 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.048607 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.048616 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.048627 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.048634 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.149911 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.149937 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.149947 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.149959 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.149970 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.157122 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.165370 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.173280 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.181906 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.190253 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.198579 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.207451 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.214958 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.223864 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.233244 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.242010 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.251845 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.251869 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.251877 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.251889 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.251898 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.251935 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.259577 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.266895 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.274997 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.283705 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.291289 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.299699 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.312571 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 
model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:58Z is after 2025-08-24T17:21:41Z" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.354538 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.354593 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.354603 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.354621 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.354633 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.456140 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.456188 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.456201 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.456222 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.456234 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.557700 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.557749 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.557761 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.557775 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.557786 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.660112 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.660166 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.660177 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.660194 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.660208 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.762696 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.762775 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.762788 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.762806 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.762820 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.864906 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.864935 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.864945 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.864956 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.864967 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.966411 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.966456 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.966465 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.966477 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:58 crc kubenswrapper[4591]: I1203 12:05:58.966486 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:58Z","lastTransitionTime":"2025-12-03T12:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.068238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.068265 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.068273 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.068285 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.068293 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.169595 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.169624 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.169633 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.169643 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.169650 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.272267 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.272295 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.272304 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.272315 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.272323 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.374569 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.374606 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.374619 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.374633 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.374640 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.476607 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.476668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.476679 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.476697 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.476709 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.578594 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.578628 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.578637 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.578648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.578656 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.679938 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.679973 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.679982 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.679996 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.680005 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.783061 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.783121 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.783131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.783142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.783150 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.885165 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.885192 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.885201 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.885215 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.885224 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.889876 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.889916 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.889916 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.889876 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:05:59 crc kubenswrapper[4591]: E1203 12:05:59.890000 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:05:59 crc kubenswrapper[4591]: E1203 12:05:59.890128 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:05:59 crc kubenswrapper[4591]: E1203 12:05:59.890234 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:05:59 crc kubenswrapper[4591]: E1203 12:05:59.890292 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.986747 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.986798 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.986811 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.986827 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:05:59 crc kubenswrapper[4591]: I1203 12:05:59.986837 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:05:59Z","lastTransitionTime":"2025-12-03T12:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.088632 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.088667 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.088678 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.088690 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.088704 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.191118 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.191151 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.191161 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.191174 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.191184 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.293156 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.293194 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.293205 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.293218 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.293228 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.394790 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.394823 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.394834 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.394847 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.394857 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.496859 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.496886 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.496894 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.496905 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.496915 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.599113 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.599142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.599150 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.599161 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.599171 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.701256 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.701288 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.701299 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.701312 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.701321 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.803366 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.803467 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.803478 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.803496 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.803506 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.905758 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.905789 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.905799 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.905814 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:00 crc kubenswrapper[4591]: I1203 12:06:00.905823 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:00Z","lastTransitionTime":"2025-12-03T12:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.008480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.008540 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.008552 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.008569 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.008583 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.110085 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.110121 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.110132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.110144 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.110153 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.212339 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.212376 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.212385 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.212398 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.212409 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.314675 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.314726 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.314737 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.314752 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.314762 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.417204 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.417264 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.417279 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.417300 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.417320 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.519212 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.519254 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.519268 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.519282 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.519290 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.621351 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.621407 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.621418 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.621442 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.621459 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.723633 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.723680 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.723693 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.723710 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.723738 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.826049 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.826099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.826113 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.826126 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.826134 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.890213 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.890258 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:01 crc kubenswrapper[4591]: E1203 12:06:01.890330 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.890345 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:01 crc kubenswrapper[4591]: E1203 12:06:01.890470 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.890490 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:01 crc kubenswrapper[4591]: E1203 12:06:01.890541 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:01 crc kubenswrapper[4591]: E1203 12:06:01.890615 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.927747 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.927776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.927786 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.927804 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:01 crc kubenswrapper[4591]: I1203 12:06:01.927812 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:01Z","lastTransitionTime":"2025-12-03T12:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.030221 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.030254 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.030263 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.030276 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.030286 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.132142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.132175 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.132185 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.132198 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.132210 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.234093 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.234161 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.234179 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.234206 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.234219 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.335621 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.335749 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.335837 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.335913 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.335979 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.437900 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.437922 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.437929 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.437940 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.437952 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.540317 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.540347 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.540359 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.540374 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.540386 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.642434 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.642537 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.642594 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.642674 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.642753 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.745043 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.745116 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.745126 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.745149 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.745165 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.846483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.846526 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.846538 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.846558 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.846575 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.904037 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.913183 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.925885 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90
092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.934503 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.943348 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.948500 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.948531 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.948541 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.948558 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.948568 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:02Z","lastTransitionTime":"2025-12-03T12:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.951362 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.959991 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.969510 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.978625 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.985357 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:02 crc kubenswrapper[4591]: I1203 12:06:02.993167 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.000555 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T12:06:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.007221 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.013732 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.020907 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.028964 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.035947 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.043432 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.050829 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.050864 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.050874 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.050889 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.050902 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.153011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.153080 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.153090 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.153108 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.153119 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.255347 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.255392 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.255403 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.255420 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.255431 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.357455 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.357492 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.357501 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.357516 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.357526 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.459006 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.459134 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.459206 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.459273 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.459334 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.560829 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.560860 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.560869 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.560882 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.560892 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.662798 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.662829 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.662838 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.662850 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.662858 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.718823 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.718847 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.718854 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.718865 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.718873 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.728343 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.731446 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.731469 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.731479 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.731490 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.731497 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.739817 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.741996 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.742017 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.742024 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.742035 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.742043 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.750548 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.753052 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.753094 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.753104 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.753113 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.753120 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.760828 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.762975 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.763001 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.763011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.763021 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.763028 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.770777 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.770879 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.771654 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.771691 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.771700 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.771710 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.771717 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.875117 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.875155 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.875164 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.875177 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.875184 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.889758 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.889784 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.889830 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.889758 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.889930 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.889934 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.889966 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:03 crc kubenswrapper[4591]: E1203 12:06:03.890008 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.977395 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.977595 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.977668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.977739 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:03 crc kubenswrapper[4591]: I1203 12:06:03.977796 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:03Z","lastTransitionTime":"2025-12-03T12:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.079924 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.079950 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.079959 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.079979 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.079985 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.181407 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.181434 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.181443 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.181453 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.181461 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.283118 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.283139 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.283146 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.283155 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.283161 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.385618 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.385657 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.385666 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.385679 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.385688 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.487137 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.487169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.487179 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.487191 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.487201 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.588360 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.588539 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.588612 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.588687 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.588765 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.690305 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.690417 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.690482 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.690550 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.690605 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.792512 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.792542 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.792550 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.792561 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.792568 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.890217 4591 scope.go:117] "RemoveContainer" containerID="c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72" Dec 03 12:06:04 crc kubenswrapper[4591]: E1203 12:06:04.890370 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.893591 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.893677 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.893753 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.893831 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.893900 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.996056 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.996197 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.996287 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.996357 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:04 crc kubenswrapper[4591]: I1203 12:06:04.996519 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:04Z","lastTransitionTime":"2025-12-03T12:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.098756 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.098812 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.098823 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.098844 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.098859 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.200847 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.201084 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.201173 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.201249 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.201315 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.303025 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.303056 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.303083 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.303108 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.303117 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.405124 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.405169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.405181 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.405197 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.405208 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.507261 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.507302 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.507327 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.507342 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.507353 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.608858 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.608886 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.608896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.608907 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.608915 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.710159 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.710186 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.710195 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.710205 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.710214 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.812453 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.812493 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.812503 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.812514 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.812525 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.889774 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.889798 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.889824 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:05 crc kubenswrapper[4591]: E1203 12:06:05.889892 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.889905 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:05 crc kubenswrapper[4591]: E1203 12:06:05.890000 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:05 crc kubenswrapper[4591]: E1203 12:06:05.890513 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:05 crc kubenswrapper[4591]: E1203 12:06:05.890609 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.913661 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.913688 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.913698 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.913710 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:05 crc kubenswrapper[4591]: I1203 12:06:05.913719 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:05Z","lastTransitionTime":"2025-12-03T12:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.015636 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.015667 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.015676 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.015687 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.015697 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.117566 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.117606 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.117617 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.117631 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.117641 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.219523 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.219555 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.219567 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.219581 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.219592 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.321537 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.321569 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.321579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.321590 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.321601 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.423646 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.423674 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.423683 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.423694 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.423704 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.525586 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.525616 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.525626 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.525638 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.525647 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.627776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.627800 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.627807 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.627817 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.627827 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.729292 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.729320 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.729329 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.729341 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.729350 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.831354 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.831388 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.831398 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.831412 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.831421 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.933035 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.933077 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.933087 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.933096 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:06 crc kubenswrapper[4591]: I1203 12:06:06.933103 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:06Z","lastTransitionTime":"2025-12-03T12:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.034776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.034807 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.034817 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.034829 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.034838 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.136361 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.136385 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.136393 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.136404 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.136434 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.238340 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.238374 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.238384 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.238399 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.238408 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.340613 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.340635 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.340643 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.340653 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.340660 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.442625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.442748 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.442830 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.442911 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.442972 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.544433 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.544668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.544779 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.544921 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.545052 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.646579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.646685 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.646767 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.646839 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.646891 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.748087 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.748105 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.748159 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.748184 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.748191 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.849990 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.850330 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.850415 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.850486 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.850546 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.889653 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.889694 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.889708 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:07 crc kubenswrapper[4591]: E1203 12:06:07.889777 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:07 crc kubenswrapper[4591]: E1203 12:06:07.889853 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.889928 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:07 crc kubenswrapper[4591]: E1203 12:06:07.890003 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:07 crc kubenswrapper[4591]: E1203 12:06:07.890175 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.952452 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.952552 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.952619 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.952690 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:07 crc kubenswrapper[4591]: I1203 12:06:07.952758 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:07Z","lastTransitionTime":"2025-12-03T12:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.055207 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.055233 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.055242 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.055253 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.055263 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.156753 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.156787 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.156797 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.156808 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.156817 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.258133 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.258156 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.258166 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.258176 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.258185 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.359688 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.359712 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.359722 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.359747 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.359755 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.461575 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.461598 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.461607 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.461617 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.461624 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.565953 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.565992 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.566002 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.566016 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.566025 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.668031 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.668091 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.668103 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.668115 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.668126 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.769507 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.769570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.769583 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.769609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.769627 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.871099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.871423 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.871514 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.871595 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.871662 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.972959 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.972997 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.973007 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.973024 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:08 crc kubenswrapper[4591]: I1203 12:06:08.973035 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:08Z","lastTransitionTime":"2025-12-03T12:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.075208 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.075270 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.075286 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.075312 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.075325 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.177210 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.177250 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.177262 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.177279 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.177292 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.279025 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.279055 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.279095 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.279108 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.279118 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.381248 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.381284 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.381295 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.381310 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.381321 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.483184 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.483220 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.483230 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.483244 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.483255 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.584838 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.584884 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.584895 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.584915 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.584928 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.686902 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.687046 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.687152 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.687229 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.687294 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.788891 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.789022 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.789129 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.789207 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.789266 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.890124 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:09 crc kubenswrapper[4591]: E1203 12:06:09.890224 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.890125 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:09 crc kubenswrapper[4591]: E1203 12:06:09.890302 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.890127 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:09 crc kubenswrapper[4591]: E1203 12:06:09.890362 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.890829 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:09 crc kubenswrapper[4591]: E1203 12:06:09.890943 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.891012 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.891030 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.891038 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.891050 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.891078 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.992580 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.992609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.992618 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.992629 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:09 crc kubenswrapper[4591]: I1203 12:06:09.992636 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:09Z","lastTransitionTime":"2025-12-03T12:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.094365 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.094396 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.094406 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.094420 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.094430 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.195449 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.195474 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.195505 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.195519 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.195532 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.239805 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:10 crc kubenswrapper[4591]: E1203 12:06:10.239957 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:06:10 crc kubenswrapper[4591]: E1203 12:06:10.240008 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:06:42.239991916 +0000 UTC m=+99.667031685 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.298255 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.298291 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.298302 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.298314 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.298324 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.400311 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.400344 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.400353 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.400371 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.400380 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.502138 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.502175 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.502186 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.502198 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.502207 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.603944 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.603979 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.603988 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.604000 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.604011 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.705722 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.705768 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.705780 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.705793 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.705802 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.807945 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.808019 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.808031 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.808057 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.808086 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.910004 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.910043 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.910058 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.910088 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:10 crc kubenswrapper[4591]: I1203 12:06:10.910097 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:10Z","lastTransitionTime":"2025-12-03T12:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.012058 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.012113 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.012127 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.012140 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.012149 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.113848 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.113878 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.113887 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.113897 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.113906 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.184481 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/0.log" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.184523 4591 generic.go:334] "Generic (PLEG): container finished" podID="19d5b224-0f8a-49a3-84f4-f2c0ef74fda4" containerID="dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f" exitCode=1 Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.184553 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerDied","Data":"dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.184859 4591 scope.go:117] "RemoveContainer" containerID="dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.197528 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.212597 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b
0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.216053 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.216095 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.216105 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.216116 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.216124 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.223853 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.239165 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543
633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.249799 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.261525 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.270852 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.281596 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.290489 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.300297 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.311776 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.317521 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.317567 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.317582 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.317609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.317623 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.320204 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.330248 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.339904 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.347945 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.357394 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.365240 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.371548 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:11Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.420676 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.420776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.420788 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.420808 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.420819 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.523166 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.523201 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.523210 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.523223 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.523232 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.624946 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.625003 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.625014 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.625030 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.625041 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.726992 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.727030 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.727041 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.727079 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.727092 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.830223 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.830269 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.830281 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.830308 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.830320 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.889553 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.889613 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.889637 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:11 crc kubenswrapper[4591]: E1203 12:06:11.889680 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.889651 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:11 crc kubenswrapper[4591]: E1203 12:06:11.889824 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:11 crc kubenswrapper[4591]: E1203 12:06:11.889965 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:11 crc kubenswrapper[4591]: E1203 12:06:11.890144 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.932827 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.932863 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.932874 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.932889 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:11 crc kubenswrapper[4591]: I1203 12:06:11.932900 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:11Z","lastTransitionTime":"2025-12-03T12:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.034858 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.034885 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.034896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.034911 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.034921 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.136919 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.136963 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.136977 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.137000 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.137011 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.189792 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/0.log" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.189846 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerStarted","Data":"d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.204173 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-
binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7a
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for 
pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.212855 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.221973 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.231275 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.238884 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.238937 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.238949 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.238964 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.238977 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.241559 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.249899 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.266756 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.300117 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.314220 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.324099 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.332367 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.339607 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.340577 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.340607 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.340617 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.340648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.340664 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.348356 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.358255 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 
12:06:12.371514 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325745
3265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.379540 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.392503 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.401423 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.443367 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.443408 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.443421 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.443444 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.443459 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.545460 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.545505 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.545516 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.545535 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.545549 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.647757 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.647789 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.647801 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.647813 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.647823 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.749857 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.749891 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.749904 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.749917 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.749927 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.851481 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.851513 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.851527 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.851544 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.851555 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.903171 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.920533 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543
633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.931005 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.945302 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.953608 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.953636 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.953646 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.953657 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.953669 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:12Z","lastTransitionTime":"2025-12-03T12:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.956498 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.970300 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.980692 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:12 crc kubenswrapper[4591]: I1203 12:06:12.995639 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.003711 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.012102 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.021952 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.028894 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.036540 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.044935 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.053621 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.055819 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.055851 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.055861 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.055875 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.055885 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.061486 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.071564 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.086087 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 
model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.157419 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.157451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.157460 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.157472 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.157484 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.259131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.259163 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.259175 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.259190 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.259199 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.361678 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.361719 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.361771 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.361785 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.361795 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.463766 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.463790 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.463799 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.463813 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.463821 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.565453 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.565489 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.565500 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.565515 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.565527 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.667194 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.667225 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.667234 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.667249 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.667261 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.768441 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.768470 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.768480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.768496 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.768508 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.869939 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.869968 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.869979 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.869992 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.870002 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.890192 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.890204 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.890211 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:13 crc kubenswrapper[4591]: E1203 12:06:13.890286 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.890348 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:13 crc kubenswrapper[4591]: E1203 12:06:13.890399 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:13 crc kubenswrapper[4591]: E1203 12:06:13.890450 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:13 crc kubenswrapper[4591]: E1203 12:06:13.890509 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.969656 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.969679 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.969688 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.969699 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.969707 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: E1203 12:06:13.979812 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.982087 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.982115 4591 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.982124 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.982134 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.982143 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:13 crc kubenswrapper[4591]: E1203 12:06:13.990972 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.994758 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.994783 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.994791 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.994800 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:13 crc kubenswrapper[4591]: I1203 12:06:13.994809 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:13Z","lastTransitionTime":"2025-12-03T12:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: E1203 12:06:14.002932 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.005369 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.005389 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.005398 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.005409 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.005416 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: E1203 12:06:14.020338 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.023711 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.023756 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.023767 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.023782 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.023792 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: E1203 12:06:14.032846 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:14 crc kubenswrapper[4591]: E1203 12:06:14.032960 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.033895 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.033914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.033924 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.033934 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.033942 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.135504 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.135560 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.135574 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.135588 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.135611 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.237424 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.237454 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.237462 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.237473 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.237482 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.339185 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.339218 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.339227 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.339238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.339245 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.440426 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.440466 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.440479 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.440495 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.440507 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.542532 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.542561 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.542570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.542584 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.542593 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.644150 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.644177 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.644186 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.644199 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.644206 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.745801 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.745855 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.745883 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.745896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.745906 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.848081 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.848120 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.848130 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.848144 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.848157 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.949830 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.949865 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.949874 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.949886 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:14 crc kubenswrapper[4591]: I1203 12:06:14.949895 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:14Z","lastTransitionTime":"2025-12-03T12:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.051690 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.051752 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.051764 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.051785 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.051804 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.153099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.153147 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.153157 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.153175 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.153192 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.254940 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.254974 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.254984 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.255013 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.255023 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.356627 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.356681 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.356692 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.356705 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.356716 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.458301 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.458334 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.458344 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.458358 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.458368 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.560552 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.560579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.560588 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.560599 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.560607 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.662755 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.662792 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.662801 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.662814 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.662823 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.764398 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.764428 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.764436 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.764448 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.764458 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.865637 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.865669 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.865679 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.865690 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.865699 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.890278 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.890387 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.890404 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:15 crc kubenswrapper[4591]: E1203 12:06:15.890499 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.890648 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:15 crc kubenswrapper[4591]: E1203 12:06:15.890719 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:15 crc kubenswrapper[4591]: E1203 12:06:15.890853 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:15 crc kubenswrapper[4591]: E1203 12:06:15.890976 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.967778 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.967806 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.967816 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.967828 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:15 crc kubenswrapper[4591]: I1203 12:06:15.967840 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:15Z","lastTransitionTime":"2025-12-03T12:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.069710 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.069757 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.069770 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.069784 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.069794 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.172160 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.172192 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.172201 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.172212 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.172220 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.274974 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.275030 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.275041 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.275083 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.275098 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.377135 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.377181 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.377191 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.377206 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.377218 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.479238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.479290 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.479300 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.479314 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.479325 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.581325 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.581362 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.581371 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.581387 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.581398 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.683788 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.683851 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.683862 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.683882 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.683893 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.785352 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.785389 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.785403 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.785418 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.785427 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.887044 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.887094 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.887104 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.887115 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.887122 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.890008 4591 scope.go:117] "RemoveContainer" containerID="c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.989552 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.989592 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.989602 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.989618 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:16 crc kubenswrapper[4591]: I1203 12:06:16.989629 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:16Z","lastTransitionTime":"2025-12-03T12:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.091053 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.091103 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.091112 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.091129 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.091140 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.193045 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.193098 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.193108 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.193122 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.193132 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.207736 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/2.log" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.210209 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.210601 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.223416 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.238611 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.250711 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.258545 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.268146 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.277048 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.286649 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.295890 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.295942 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.295956 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.295977 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.295991 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.299619 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.315154 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd71
0564130d43485581bdaeb9e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 
model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\"
:[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.325374 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.340011 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.351301 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.360252 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.369620 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e
6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.379131 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.388780 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.398699 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.399114 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.399225 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.399306 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.399373 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.402642 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.414002 4591 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.501700 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.501739 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.501749 4591 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.501763 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.501773 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.603321 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.603349 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.603356 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.603366 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.603375 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.705685 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.705715 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.705740 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.705754 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.705764 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.808118 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.808184 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.808196 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.808218 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.808230 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.890118 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.890206 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:17 crc kubenswrapper[4591]: E1203 12:06:17.890242 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.890278 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:17 crc kubenswrapper[4591]: E1203 12:06:17.890404 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.890485 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:17 crc kubenswrapper[4591]: E1203 12:06:17.890577 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:17 crc kubenswrapper[4591]: E1203 12:06:17.890642 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.910244 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.910280 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.910292 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.910305 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:17 crc kubenswrapper[4591]: I1203 12:06:17.910316 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:17Z","lastTransitionTime":"2025-12-03T12:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.012021 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.012054 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.012078 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.012092 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.012101 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.113603 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.113648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.113660 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.113676 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.113688 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.214937 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.214985 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.214997 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.215017 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.215030 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.215057 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/3.log" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.215766 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/2.log" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.218027 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5" exitCode=1 Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.218092 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.218152 4591 scope.go:117] "RemoveContainer" containerID="c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.219141 4591 scope.go:117] "RemoveContainer" containerID="ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5" Dec 03 12:06:18 crc kubenswrapper[4591]: E1203 12:06:18.219372 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.238444 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.249928 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.261128 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.271566 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.281094 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.292464 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.303092 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.311310 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.317503 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.317538 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.317551 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.317567 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.317580 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.320188 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.328751 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.335813 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.347865 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.357252 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.366670 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.374714 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.383004 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.397442 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1e217f7a24bb72644352c19a815d32ec03efb8b0df6ccf1f6197e8d681dad72\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:05:50Z\\\",\\\"message\\\":\\\"ns: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:05:50Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:05:50.686356 6238 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1203 12:05:50.686346 6238 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-diagnostics/network-check-target]} name:Service_openshift-network-diagnostics/network-check-target_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.219:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7594bb65-e742-44b3-a975-d639b1128be5}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:05:50.686361 6238 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 12:05:50.686370 6238 model_client\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:17Z\\\",\\\"message\\\":\\\"Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.591896 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.592031 6605 ovn.go:134] Ensuring zone local for 
Pod openshift-multus/multus-additional-cni-plugins-5pvst in node crc\\\\nI1203 12:06:17.592038 6605 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-5pvst after 0 failed attempt(s)\\\\nI1203 12:06:17.592042 6605 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-5pvst\\\\nI1203 12:06:17.591566 6605 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 12:06:17.592048 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:06:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/va
r/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.406439 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.420024 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.420056 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.420083 4591 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.420100 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.420109 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.522403 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.522438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.522448 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.522468 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.522483 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.624233 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.624263 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.624273 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.624286 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.624295 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.726272 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.726313 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.726323 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.726343 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.726355 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.827739 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.827775 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.827785 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.827801 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.827811 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.930020 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.930054 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.930085 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.930100 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:18 crc kubenswrapper[4591]: I1203 12:06:18.930112 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:18Z","lastTransitionTime":"2025-12-03T12:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.032396 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.032443 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.032453 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.032468 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.032478 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.134465 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.134497 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.134525 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.134542 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.134552 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.221853 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/3.log" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.230604 4591 scope.go:117] "RemoveContainer" containerID="ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5" Dec 03 12:06:19 crc kubenswrapper[4591]: E1203 12:06:19.231705 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.236826 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.236896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.236910 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.236928 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.236942 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.241207 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.251455 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.260675 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.271831 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.279845 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.289578 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.296843 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.304482 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.313658 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.322952 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.331001 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.338686 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.338708 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.338730 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.338744 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.338753 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.339665 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.349175 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.358588 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.371504 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:17Z\\\",\\\"message\\\":\\\"Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.591896 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.592031 6605 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-5pvst in node crc\\\\nI1203 12:06:17.592038 6605 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-5pvst after 0 failed attempt(s)\\\\nI1203 12:06:17.592042 6605 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-5pvst\\\\nI1203 12:06:17.591566 6605 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 12:06:17.592048 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:06:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.380216 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.388328 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.402416 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080a
ec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.441182 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.441210 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.441222 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.441241 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.441254 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.543018 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.543061 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.543093 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.543109 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.543119 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.644759 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.644790 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.644801 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.644816 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.644826 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.745951 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.745988 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.746043 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.746061 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.746090 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.848472 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.848663 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.848745 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.848829 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.848901 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.889872 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.889890 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.889903 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:19 crc kubenswrapper[4591]: E1203 12:06:19.889966 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:19 crc kubenswrapper[4591]: E1203 12:06:19.890096 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:19 crc kubenswrapper[4591]: E1203 12:06:19.890289 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.890471 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:19 crc kubenswrapper[4591]: E1203 12:06:19.890676 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.901367 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.951020 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.951088 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.951101 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.951117 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:19 crc kubenswrapper[4591]: I1203 12:06:19.951129 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:19Z","lastTransitionTime":"2025-12-03T12:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.053389 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.053443 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.053458 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.053472 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.053481 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.155938 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.155986 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.156000 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.156021 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.156035 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.258110 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.258145 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.258156 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.258173 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.258185 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.360402 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.360436 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.360447 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.360460 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.360469 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.462056 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.462091 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.462102 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.462111 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.462118 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.563978 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.564004 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.564013 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.564022 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.564029 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.665446 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.665479 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.665487 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.665500 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.665511 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.767306 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.767329 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.767338 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.767346 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.767352 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.869407 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.869441 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.869451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.869463 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.869493 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.971430 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.971483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.971495 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.971514 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:20 crc kubenswrapper[4591]: I1203 12:06:20.971526 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:20Z","lastTransitionTime":"2025-12-03T12:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.073535 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.073569 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.073578 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.073590 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.073599 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.174838 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.174881 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.174892 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.174909 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.174924 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.276957 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.276996 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.277008 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.277023 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.277035 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.379289 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.379460 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.379538 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.379613 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.379683 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.482010 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.482135 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.482310 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.482388 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.482450 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.585150 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.585196 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.585207 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.585223 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.585240 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.687399 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.687431 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.687441 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.687454 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.687466 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.790161 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.790197 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.790209 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.790225 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.790241 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.889768 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.889833 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.889857 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 12:06:21 crc kubenswrapper[4591]: E1203 12:06:21.889907 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.889920 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 12:06:21 crc kubenswrapper[4591]: E1203 12:06:21.889997 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 12:06:21 crc kubenswrapper[4591]: E1203 12:06:21.890043 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 12:06:21 crc kubenswrapper[4591]: E1203 12:06:21.890158 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.891798 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.891826 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.891836 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.891851 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.891859 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.993756 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.993790 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.993802 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.993817 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:21 crc kubenswrapper[4591]: I1203 12:06:21.993828 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:21Z","lastTransitionTime":"2025-12-03T12:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.095099 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.095133 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.095143 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.095157 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.095169 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.196859 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.196884 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.196893 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.196922 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.196930 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.299255 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.299297 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.299310 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.299324 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.299333 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.401076 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.401110 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.401120 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.401132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.401142 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.502887 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.502917 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.502927 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.502937 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.502946 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.604883 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.604905 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.604912 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.604922 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.604931 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.706670 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.706698 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.706717 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.706740 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.706749 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.807837 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.807866 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.807876 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.807886 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.807894 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.900654 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.909639 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.909674 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.909686 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.909699 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.909717 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:22Z","lastTransitionTime":"2025-12-03T12:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.915409 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.924283 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.932494 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.941927 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.955876 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.963386 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.972981 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.980333 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72fecfb1-2b06-4dc7-a74f-322186694b9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0248c27276ad30013892a64d9af62fb7d4831802e820d201fc61d91dce82c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53a74d0eb43ca14a15a3de480703cf9e5c77a34ffa355eac97aa5bcfc2b1b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53a74d0eb43ca14a15a3de480703cf9e5c77a34ffa355eac97aa5bcfc2b1b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.989830 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@s
ha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-0
3T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:22 crc kubenswrapper[4591]: I1203 12:06:22.996385 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.004766 4591 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.011456 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.011485 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.011493 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.011506 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.011517 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.013585 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.022371 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.030645 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.038916 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.045623 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.053563 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.065460 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:17Z\\\",\\\"message\\\":\\\"Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.591896 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.592031 6605 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-5pvst in node crc\\\\nI1203 12:06:17.592038 6605 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-5pvst after 0 failed attempt(s)\\\\nI1203 12:06:17.592042 6605 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-5pvst\\\\nI1203 12:06:17.591566 6605 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 12:06:17.592048 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:06:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.113665 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.113698 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.113717 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.113732 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.113741 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.215324 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.215356 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.215367 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.215382 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.215394 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.317250 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.317280 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.317289 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.317304 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.317314 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.418849 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.418878 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.418889 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.418900 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.418910 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.521057 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.521110 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.521119 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.521130 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.521139 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.623018 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.623041 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.623049 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.623058 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.623081 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.724959 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.724982 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.724991 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.725000 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.725008 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.755284 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.755428 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:27.755410397 +0000 UTC m=+145.182450177 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.826973 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.826994 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.827003 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.827013 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.827020 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.855786 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.855823 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.855846 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.855867 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.855978 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.855991 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856001 4591 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856027 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:07:27.856020395 +0000 UTC m=+145.283060154 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856057 4591 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856103 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:07:27.856097179 +0000 UTC m=+145.283136949 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856151 4591 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856173 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:07:27.856166759 +0000 UTC m=+145.283206529 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856185 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856205 4591 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856214 4591 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.856249 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-03 12:07:27.856239486 +0000 UTC m=+145.283279256 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.889609 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.889638 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.889610 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.889710 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.889609 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.889796 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.889886 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:23 crc kubenswrapper[4591]: E1203 12:06:23.889910 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.928276 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.928302 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.928311 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.928321 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:23 crc kubenswrapper[4591]: I1203 12:06:23.928329 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:23Z","lastTransitionTime":"2025-12-03T12:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.030293 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.030316 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.030344 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.030355 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.030365 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.057123 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.057147 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.057155 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.057164 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.057172 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: E1203 12:06:24.066525 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.068831 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.068868 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.068879 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.068891 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.068901 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: E1203 12:06:24.077254 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.079322 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.079349 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.079359 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.079371 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.079379 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: E1203 12:06:24.087700 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.090093 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.090123 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.090132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.090145 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.090155 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: E1203 12:06:24.097922 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.100176 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.100228 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.100238 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.100260 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.100274 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: E1203 12:06:24.108311 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:24 crc kubenswrapper[4591]: E1203 12:06:24.108418 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.132969 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.133129 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.133223 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.133319 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.133396 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.235285 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.235314 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.235324 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.235334 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.235344 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.336909 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.336934 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.336945 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.336956 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.336964 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.438776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.438807 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.438816 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.438828 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.438835 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.540953 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.540979 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.540990 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.541004 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.541016 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.643028 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.643056 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.643089 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.643113 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.643126 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.745104 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.745135 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.745145 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.745157 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.745167 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.847048 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.847116 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.847128 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.847152 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.847165 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.948628 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.948665 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.948676 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.948690 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:24 crc kubenswrapper[4591]: I1203 12:06:24.948712 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:24Z","lastTransitionTime":"2025-12-03T12:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.051091 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.051138 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.051152 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.051170 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.051181 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.152696 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.152746 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.152758 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.152774 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.152789 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.254542 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.254570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.254580 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.254592 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.254603 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.355981 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.356022 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.356031 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.356048 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.356058 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.457533 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.457563 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.457574 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.457587 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.457599 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.559358 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.559387 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.559398 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.559413 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.559423 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.661211 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.661247 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.661256 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.661268 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.661276 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.762622 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.762650 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.762660 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.762670 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.762680 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.864566 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.864593 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.864604 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.864616 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.864625 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.890425 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.890467 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.890489 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:25 crc kubenswrapper[4591]: E1203 12:06:25.890517 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:25 crc kubenswrapper[4591]: E1203 12:06:25.890590 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.890609 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:25 crc kubenswrapper[4591]: E1203 12:06:25.890732 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:25 crc kubenswrapper[4591]: E1203 12:06:25.890782 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.966591 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.966619 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.966629 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.966642 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:25 crc kubenswrapper[4591]: I1203 12:06:25.966654 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:25Z","lastTransitionTime":"2025-12-03T12:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.068101 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.068133 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.068142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.068155 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.068171 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.170577 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.170620 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.170630 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.170646 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.170657 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.272749 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.272778 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.272786 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.272797 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.272805 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.374079 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.374107 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.374117 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.374129 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.374137 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.476023 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.476051 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.476059 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.476087 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.476097 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.577548 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.577575 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.577584 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.577595 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.577602 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.679834 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.679860 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.679877 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.679886 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.679893 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.781795 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.781817 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.781825 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.781852 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.781861 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.883434 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.883451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.883458 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.883467 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.883492 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.985042 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.985085 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.985102 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.985112 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:26 crc kubenswrapper[4591]: I1203 12:06:26.985120 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:26Z","lastTransitionTime":"2025-12-03T12:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.087059 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.087330 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.087399 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.087483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.087554 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.190012 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.190100 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.190119 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.190131 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.190139 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.292590 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.292626 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.292634 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.292646 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.292655 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.394555 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.394716 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.394728 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.394746 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.394755 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.496350 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.496393 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.496404 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.496419 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.496428 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.598526 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.598551 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.598559 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.598574 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.598583 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.700526 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.700550 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.700558 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.700570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.700578 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.801659 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.801682 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.801700 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.801710 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.801719 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.889681 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.889725 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.889749 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.889699 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:27 crc kubenswrapper[4591]: E1203 12:06:27.889768 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:27 crc kubenswrapper[4591]: E1203 12:06:27.889800 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:27 crc kubenswrapper[4591]: E1203 12:06:27.889840 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:27 crc kubenswrapper[4591]: E1203 12:06:27.889869 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.903572 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.903596 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.903614 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.903624 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:27 crc kubenswrapper[4591]: I1203 12:06:27.903631 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:27Z","lastTransitionTime":"2025-12-03T12:06:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.005724 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.005757 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.005767 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.005778 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.005788 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.107607 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.107735 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.107815 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.107878 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.107942 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.210947 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.211005 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.211016 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.211038 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.211052 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.313603 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.313644 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.313655 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.313667 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.313677 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.415869 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.415905 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.415914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.415927 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.415936 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.518012 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.518051 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.518088 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.518104 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.518113 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.619947 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.619984 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.619994 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.620011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.620023 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.721330 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.721367 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.721380 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.721395 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.721405 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.823614 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.823657 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.823668 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.823682 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.823703 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.926060 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.926144 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.926154 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.926176 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:28 crc kubenswrapper[4591]: I1203 12:06:28.926195 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:28Z","lastTransitionTime":"2025-12-03T12:06:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.028362 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.028403 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.028414 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.028437 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.028448 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.130810 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.130842 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.130852 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.130863 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.130871 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.232818 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.232852 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.232864 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.232876 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.232884 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.335042 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.335106 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.335117 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.335137 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.335149 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.437345 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.437383 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.437394 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.437408 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.437422 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.539665 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.539740 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.539753 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.539775 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.539789 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.642408 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.642451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.642461 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.642472 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.642481 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.745080 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.745119 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.745132 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.745146 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.745155 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.847641 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.847673 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.847691 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.847704 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.847713 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.890522 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.890535 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.890543 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.890522 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:29 crc kubenswrapper[4591]: E1203 12:06:29.890644 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:29 crc kubenswrapper[4591]: E1203 12:06:29.890716 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:29 crc kubenswrapper[4591]: E1203 12:06:29.890785 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:29 crc kubenswrapper[4591]: E1203 12:06:29.890872 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.950389 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.950433 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.950445 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.950464 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:29 crc kubenswrapper[4591]: I1203 12:06:29.950481 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:29Z","lastTransitionTime":"2025-12-03T12:06:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.052334 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.052371 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.052382 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.052396 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.052409 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.154437 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.154483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.154496 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.154510 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.154522 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.256670 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.256727 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.256738 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.256752 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.256761 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.358774 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.358815 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.358825 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.358841 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.358852 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.460956 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.460992 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.461002 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.461016 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.461025 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.562409 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.562445 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.562453 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.562466 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.562475 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.664387 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.664420 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.664432 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.664444 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.664455 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.766598 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.766622 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.766630 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.766639 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.766648 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.867992 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.868031 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.868042 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.868055 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.868083 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.969825 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.969857 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.969866 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.969877 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:30 crc kubenswrapper[4591]: I1203 12:06:30.969884 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:30Z","lastTransitionTime":"2025-12-03T12:06:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.071888 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.071912 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.071921 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.071929 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.071936 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.174020 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.174042 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.174050 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.174061 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.174083 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.275840 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.275871 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.275882 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.275896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.275905 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.377707 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.377737 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.377746 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.377758 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.377766 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.479544 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.479573 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.479587 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.479597 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.479606 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.581333 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.581370 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.581380 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.581393 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.581403 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.683233 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.683262 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.683273 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.683287 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.683296 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.784964 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.785012 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.785022 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.785033 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.785042 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.887967 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.888014 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.888025 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.888038 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.888047 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.890267 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.890310 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.890320 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.890267 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:31 crc kubenswrapper[4591]: E1203 12:06:31.890363 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:31 crc kubenswrapper[4591]: E1203 12:06:31.890479 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:31 crc kubenswrapper[4591]: E1203 12:06:31.890538 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:31 crc kubenswrapper[4591]: E1203 12:06:31.890594 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.990196 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.990232 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.990243 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.990259 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:31 crc kubenswrapper[4591]: I1203 12:06:31.990271 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:31Z","lastTransitionTime":"2025-12-03T12:06:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.092447 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.092502 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.092512 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.092535 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.092550 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.194021 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.194051 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.194061 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.194092 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.194103 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.295857 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.295883 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.295893 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.295906 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.295915 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.397879 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.397929 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.397938 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.397950 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.397960 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.500306 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.500336 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.500349 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.500361 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.500373 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.602010 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.602041 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.602052 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.602082 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.602092 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.703548 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.703578 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.703587 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.703622 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.703632 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.804591 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.804615 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.804624 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.804635 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.804643 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.899196 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff8424d361923b4e77a2fe010f21db9282f478bb355475d32c4532245fafb719\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e610384994836a921caf53fb09be612fca8bf0c8eb8c1ca7beb731e8c3b3e2be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.906100 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.906145 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.906155 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.906167 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.906175 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:32Z","lastTransitionTime":"2025-12-03T12:06:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.914847 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"362420fc-42a2-444d-b450-49ff1c0eb5c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:17Z\\\",\\\"message\\\":\\\"Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.591896 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:06:17.592031 6605 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-5pvst in node crc\\\\nI1203 12:06:17.592038 6605 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-5pvst after 0 failed attempt(s)\\\\nI1203 12:06:17.592042 6605 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-5pvst\\\\nI1203 12:06:17.591566 6605 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1203 12:06:17.592048 6605 model_client.go:398] Mutate operations generated as: [{Op:mutate 
Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:06:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kw8cm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-k4dxv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.924159 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecc55965-3a84-4f4c-9d5e-2a3813584b31\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdc34eb06479541cf4b62ded86a69dd9802f48e0eb98922908c1b28d190da25f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59130911b133e1ef30979d54daae42ab73f8fffc0fbb677088665eb3b6ee294e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c199d5a969c28fc12a4bbf2b81564072e170219c2d7944f68b13c9d66f58e74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1156b3c0e0cadecfb9320bec454ff22cb94ecf082cb2576c540042347278076f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.939488 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4e2208d6-089c-40b2-a671-15fd1edc23fb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77b3004963bc72ce9058314ae0d6eae2a1d5bf454456379a42833b64d9ed5762\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://543633e655212525d91ad4b229f5253065c5ff784e0518c859b3d79f939fad4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8064aaddf29286102d48a47bd412377763d81047de77367942a0e5eccd4abe4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92521fa0115ea8ad4caeaf8a71294ce0baf080aec9db0253e54fed12b30041a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed25435bfceb73bb0d6d1ad969477710bb55dbd274e799a1d4b8e12b6b0f6a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81446c5c48bd7802e883c5b120cdcdaae249bd4733e4d84ccbf26db807d60c22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7969010638c0c851a7c6fedcea5dc63abb06883dd1faf5d0f5651714b264c733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aeb4c61f8f12467e5c58290b64d6b4cb8784a393b60d5763e3637979634f4058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.952045 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.960045 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.972809 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-5pvst" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d56ae362-837a-44a3-8ec3-d082ac52977b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9e52246886c9493885a5dee5a89c5bd731cde7f850c66420cfc52393726078f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aeb4fc36b73967db22d1510c3ab69a838ccd83945e2691e87833f58d711db57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4290c0c26f6ed9ec4173aefdd5ab4db917a238c193f571806398b6e2b99e79a4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeab25035964a9599bbd769d7cf81dd4e8885d2d6e86e0f3510b5e28278e309f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dea05156f65d1e3fa814010e4ada08a2a66550a23d49825c03b78c098ae63962\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5254f960b9cc4bc5eb6120215d1414ba57936e75a1d216cf608c6794a72b4f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cecd8a83fc4776e1bf5de62510a2508aa7bf0f4136f74ea1af5d809221a4c500\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkg7m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-5pvst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.981605 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5drvq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cd44649-dee5-4a99-8123-059f30fd0c1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6bgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5drvq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.991357 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbb6d2e4-c391-4660-83f0-555baff34724\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63ff388080f0edccbf332d15435dc1d8c914dfe073dc660db90e0b43f748cd6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ec5c5965cc539c94203d2e2b65037064cad7f5c652bf50af5d87c6f58878f9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd526c20597d50815908c601fbeabe22a2b1650544fb091b049f2c177fdb9bbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:32 crc kubenswrapper[4591]: I1203 12:06:32.999355 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72fecfb1-2b06-4dc7-a74f-322186694b9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0248c27276ad30013892a64d9af62fb7d4831802e820d201fc61d91dce82c2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53a74d0eb43ca14a15a3de480703cf9e5c77a34ffa355eac97aa5bcfc2b1b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53a74d0eb43ca14a15a3de480703cf9e5c77a34ffa355eac97aa5bcfc2b1b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:32Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.007643 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.007684 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.007695 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.007712 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.007724 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.009580 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9091b9-cf99-4097-a1a6-98adef19bc6f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.019161 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5b9ef384a5662d45f732499bb0990cc5d86541f1d58cdaeff9e5b7a81374cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.028118 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96827b8d-1081-4acd-a2db-c2fa3a87b42a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c03cd45878d2f8e24b20fa5974cfe024cabcaa662dc554a2548ef431a1e7fdc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5qrqj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mnzzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.038147 4591 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-2qprr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:06:11Z\\\",\\\"message\\\":\\\"2025-12-03T12:05:25+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7\\\\n2025-12-03T12:05:25+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5d21f7cc-e662-48e6-883e-5d9d81b487c7 to /host/opt/cni/bin/\\\\n2025-12-03T12:05:26Z [verbose] multus-daemon started\\\\n2025-12-03T12:05:26Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:06:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:05:25Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:06:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlrpg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2qprr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.046723 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4da01412-45d4-4dcf-805d-f75dfcfae828\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://128d1df8bd926246e6d6bb485290f788e2da717e145625487f6fe9c7ec36bdad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96c32ddb99faeb9bd617cb800ebb051d09465f4d76c9a6aefa90e8caaa3883f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fpgw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgvc7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 
12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.055372 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:19Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.068317 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:22Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80864aea0c6e5226e3d7cc68542fd5fd8808ff88b54f3ef3fa6793c869ef6d04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.075398 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bshxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fad8ba4a-fd4a-40c7-b5ca-94b9c286a48f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea206a20e21683d5ba940ef41fe22ab222446f58a4341c80fad25966b1699042\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-75bgw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bshxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.082097 4591 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rwr66" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d41611d-4a75-440e-9c67-4222a9b4fd34\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:05:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ad4639b6c4ca7800ab9212832f1e2a02e27552c827aadece7216ba2f4a39ccc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:05:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dtjfn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:05:24Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rwr66\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.109966 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.110001 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.110011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.110025 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.110035 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.214582 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.214634 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.214648 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.214682 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.214697 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.316890 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.316953 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.316977 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.316999 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.317020 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.419213 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.419253 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.419263 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.419279 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.419291 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.521770 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.521802 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.521811 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.521824 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.521834 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.623694 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.623779 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.623791 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.623818 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.623831 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.725449 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.725489 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.725501 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.725518 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.725530 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.827810 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.827831 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.827842 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.827852 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.827868 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.889856 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.889945 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.889945 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.890155 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:33 crc kubenswrapper[4591]: E1203 12:06:33.890276 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.890373 4591 scope.go:117] "RemoveContainer" containerID="ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5" Dec 03 12:06:33 crc kubenswrapper[4591]: E1203 12:06:33.890378 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:33 crc kubenswrapper[4591]: E1203 12:06:33.890484 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:33 crc kubenswrapper[4591]: E1203 12:06:33.890495 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:06:33 crc kubenswrapper[4591]: E1203 12:06:33.890551 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.929688 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.929728 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.929738 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.929753 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:33 crc kubenswrapper[4591]: I1203 12:06:33.929765 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:33Z","lastTransitionTime":"2025-12-03T12:06:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.032253 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.032293 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.032302 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.032317 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.032328 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.134339 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.134381 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.134392 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.134405 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.134420 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.236579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.236619 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.236634 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.236657 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.236667 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.245531 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.245578 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.245588 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.245604 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.245614 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: E1203 12:06:34.254893 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.257977 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.258013 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.258039 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.258054 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.258086 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: E1203 12:06:34.267188 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.269516 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.269545 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.269555 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.269566 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.269596 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: E1203 12:06:34.277476 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.279772 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.279831 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.279842 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.279853 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.279863 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: E1203 12:06:34.287511 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.289889 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.289914 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.289923 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.289933 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.289940 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: E1203 12:06:34.297798 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:06:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b9815d2a-4e84-4a36-9a52-7c608e704615\\\",\\\"systemUUID\\\":\\\"ad5d5022-d9e3-4192-a6ae-548c1b27699e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:06:34Z is after 2025-08-24T17:21:41Z" Dec 03 12:06:34 crc kubenswrapper[4591]: E1203 12:06:34.297920 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.341764 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.341796 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.341810 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.341823 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.341832 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.443863 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.443900 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.443910 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.443924 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.443934 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.545551 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.545584 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.545593 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.545606 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.545616 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.647422 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.647449 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.647522 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.647538 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.647546 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.749172 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.749205 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.749215 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.749228 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.749239 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.851341 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.851364 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.851373 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.851383 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.851392 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.953038 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.953083 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.953093 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.953105 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:34 crc kubenswrapper[4591]: I1203 12:06:34.953114 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:34Z","lastTransitionTime":"2025-12-03T12:06:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.054364 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.054388 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.054397 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.054409 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.054420 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.156488 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.156513 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.156522 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.156531 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.156600 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.258300 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.258325 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.258351 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.258363 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.258372 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.360147 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.360174 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.360185 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.360196 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.360205 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.461480 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.461511 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.461520 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.461534 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.461544 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.562913 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.562946 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.562956 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.562974 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.562985 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.665058 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.665100 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.665109 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.665123 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.665133 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.766702 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.766731 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.766740 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.766752 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.766761 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.868970 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.869008 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.869019 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.869032 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.869043 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.889504 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.889531 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.889789 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:35 crc kubenswrapper[4591]: E1203 12:06:35.889901 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:35 crc kubenswrapper[4591]: E1203 12:06:35.889995 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:35 crc kubenswrapper[4591]: E1203 12:06:35.890160 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.890488 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:35 crc kubenswrapper[4591]: E1203 12:06:35.890583 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.974178 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.974243 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.974255 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.974288 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:35 crc kubenswrapper[4591]: I1203 12:06:35.974511 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:35Z","lastTransitionTime":"2025-12-03T12:06:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.077587 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.077617 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.077625 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.077641 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.077650 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.179701 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.179734 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.179744 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.179761 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.179786 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.282029 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.282055 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.282079 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.282089 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.282098 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.383461 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.383483 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.383492 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.383563 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.383575 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.485186 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.485221 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.485232 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.485245 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.485255 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.587184 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.587409 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.587470 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.587554 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.587610 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.689954 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.689983 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.689993 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.690004 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.690012 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.791777 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.791813 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.791823 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.791837 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.791847 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.893489 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.893533 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.893543 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.893552 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.893561 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.995575 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.995626 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.995638 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.995653 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:36 crc kubenswrapper[4591]: I1203 12:06:36.995675 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:36Z","lastTransitionTime":"2025-12-03T12:06:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.096817 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.096932 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.097003 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.097090 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.097156 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.198602 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.198834 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.198957 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.199103 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.199159 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.301043 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.301124 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.301136 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.301157 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.301170 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.403886 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.403924 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.403932 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.403947 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.403957 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.505564 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.505615 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.505627 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.505653 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.505676 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.608113 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.608152 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.608164 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.608184 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.608197 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.710142 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.710189 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.710202 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.710217 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.710233 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.811931 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.811978 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.811989 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.812008 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.812019 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.889783 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.889907 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.889942 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:37 crc kubenswrapper[4591]: E1203 12:06:37.889999 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.890018 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:37 crc kubenswrapper[4591]: E1203 12:06:37.890179 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:37 crc kubenswrapper[4591]: E1203 12:06:37.890305 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:37 crc kubenswrapper[4591]: E1203 12:06:37.890391 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.913593 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.913626 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.913634 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.913669 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:37 crc kubenswrapper[4591]: I1203 12:06:37.913680 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:37Z","lastTransitionTime":"2025-12-03T12:06:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.015498 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.015539 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.015554 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.015570 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.015580 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.117800 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.117834 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.117845 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.117857 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.117869 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.219192 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.219226 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.219236 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.219247 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.219255 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.321286 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.321324 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.321336 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.321350 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.321360 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.423271 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.423316 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.423329 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.423355 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.423368 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.524745 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.524793 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.524803 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.524814 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.524821 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.626461 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.626512 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.626524 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.626540 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.626551 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.728861 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.728894 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.728906 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.728919 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.728931 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.830641 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.830698 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.830709 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.830726 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.830739 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.933046 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.933102 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.933111 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.933128 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:38 crc kubenswrapper[4591]: I1203 12:06:38.933139 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:38Z","lastTransitionTime":"2025-12-03T12:06:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.035357 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.035387 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.035399 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.035411 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.035420 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.137273 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.137312 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.137326 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.137339 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.137352 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.239126 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.239159 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.239169 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.239181 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.239191 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.341271 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.341307 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.341317 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.341333 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.341341 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.443776 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.443808 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.443818 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.443832 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.443840 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.545883 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.545909 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.545918 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.545929 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.545937 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.647897 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.648011 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.648087 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.648158 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.648239 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.749777 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.749823 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.749839 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.749857 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.749869 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.851423 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.851459 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.851471 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.851484 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.851493 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.890256 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.890274 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.890286 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.890256 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:39 crc kubenswrapper[4591]: E1203 12:06:39.890370 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:39 crc kubenswrapper[4591]: E1203 12:06:39.890446 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:39 crc kubenswrapper[4591]: E1203 12:06:39.890506 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:39 crc kubenswrapper[4591]: E1203 12:06:39.890570 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.953561 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.953609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.953618 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.953640 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:39 crc kubenswrapper[4591]: I1203 12:06:39.953657 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:39Z","lastTransitionTime":"2025-12-03T12:06:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.055095 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.055125 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.055135 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.055145 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.055155 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.157157 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.157186 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.157195 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.157206 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.157217 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.258688 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.258731 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.258746 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.258761 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.258772 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.361242 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.361271 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.361279 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.361290 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.361299 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.463449 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.463484 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.463493 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.463504 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.463513 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.565393 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.565415 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.565424 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.565437 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.565446 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.667565 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.667599 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.667608 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.667619 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.667629 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.769543 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.769571 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.769579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.769589 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.769596 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.871811 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.871836 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.871844 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.871854 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.871882 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.973741 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.973782 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.973793 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.973807 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:40 crc kubenswrapper[4591]: I1203 12:06:40.973819 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:40Z","lastTransitionTime":"2025-12-03T12:06:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.076266 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.076296 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.076305 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.076317 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.076325 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.177620 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.177654 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.177664 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.177674 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.177681 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.279624 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.279684 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.279699 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.279716 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.279734 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.380929 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.380962 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.380971 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.380982 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.380989 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.482995 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.483023 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.483032 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.483042 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.483051 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.584662 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.584700 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.584711 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.584724 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.584732 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.686455 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.686491 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.686500 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.686514 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.686523 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.788317 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.788342 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.788350 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.788359 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.788366 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.889420 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.889451 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.889488 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:41 crc kubenswrapper[4591]: E1203 12:06:41.889585 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:41 crc kubenswrapper[4591]: E1203 12:06:41.889681 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.889699 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:41 crc kubenswrapper[4591]: E1203 12:06:41.889739 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:41 crc kubenswrapper[4591]: E1203 12:06:41.889867 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.890420 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.890447 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.890456 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.890469 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.890477 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.992429 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.992474 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.992490 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.992512 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:41 crc kubenswrapper[4591]: I1203 12:06:41.992527 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:41Z","lastTransitionTime":"2025-12-03T12:06:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.094140 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.094197 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.094456 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.094492 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.094501 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.196150 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.196176 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.196183 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.196192 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.196224 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.297516 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.297557 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.297566 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.297579 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.297590 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.320330 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:42 crc kubenswrapper[4591]: E1203 12:06:42.320422 4591 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:06:42 crc kubenswrapper[4591]: E1203 12:06:42.320468 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs podName:8cd44649-dee5-4a99-8123-059f30fd0c1b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:46.3204555 +0000 UTC m=+163.747495269 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs") pod "network-metrics-daemon-5drvq" (UID: "8cd44649-dee5-4a99-8123-059f30fd0c1b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.399059 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.399111 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.399119 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.399128 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.399136 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.501572 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.501600 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.501609 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.501622 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.501629 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.604134 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.604210 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.604220 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.604233 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.604243 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.705862 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.705888 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.705896 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.705906 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.705914 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.807605 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.807628 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.807636 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.807652 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.807661 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.905666 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-5pvst" podStartSLOduration=78.905627102 podStartE2EDuration="1m18.905627102s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:42.905368737 +0000 UTC m=+100.332408508" watchObservedRunningTime="2025-12-03 12:06:42.905627102 +0000 UTC m=+100.332666862" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.909681 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.909725 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.909763 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.909783 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.909797 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:42Z","lastTransitionTime":"2025-12-03T12:06:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.936842 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=82.936819812 podStartE2EDuration="1m22.936819812s" podCreationTimestamp="2025-12-03 12:05:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:42.928979837 +0000 UTC m=+100.356019606" watchObservedRunningTime="2025-12-03 12:06:42.936819812 +0000 UTC m=+100.363859582" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.936993 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=23.936988749 podStartE2EDuration="23.936988749s" podCreationTimestamp="2025-12-03 12:06:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:42.936354699 +0000 UTC m=+100.363394469" watchObservedRunningTime="2025-12-03 12:06:42.936988749 +0000 UTC m=+100.364028519" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.948874 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=83.9488378 podStartE2EDuration="1m23.9488378s" podCreationTimestamp="2025-12-03 12:05:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:42.947696307 +0000 UTC m=+100.374736077" watchObservedRunningTime="2025-12-03 12:06:42.9488378 +0000 UTC 
m=+100.375877570" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.976513 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-2qprr" podStartSLOduration=78.976500814 podStartE2EDuration="1m18.976500814s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:42.976382322 +0000 UTC m=+100.403422092" watchObservedRunningTime="2025-12-03 12:06:42.976500814 +0000 UTC m=+100.403540575" Dec 03 12:06:42 crc kubenswrapper[4591]: I1203 12:06:42.988507 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgvc7" podStartSLOduration=77.988490139 podStartE2EDuration="1m17.988490139s" podCreationTimestamp="2025-12-03 12:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:42.988244147 +0000 UTC m=+100.415283917" watchObservedRunningTime="2025-12-03 12:06:42.988490139 +0000 UTC m=+100.415529908" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.012311 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.012351 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.012361 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.012376 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.012387 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.014401 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-bshxj" podStartSLOduration=79.014384973 podStartE2EDuration="1m19.014384973s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:43.013886779 +0000 UTC m=+100.440926539" watchObservedRunningTime="2025-12-03 12:06:43.014384973 +0000 UTC m=+100.441424743" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.029362 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-rwr66" podStartSLOduration=79.029345134 podStartE2EDuration="1m19.029345134s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:43.02174587 +0000 UTC m=+100.448785641" watchObservedRunningTime="2025-12-03 12:06:43.029345134 +0000 UTC m=+100.456384894" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.029826 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podStartSLOduration=79.029822801 podStartE2EDuration="1m19.029822801s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:43.029275594 +0000 UTC m=+100.456315364" watchObservedRunningTime="2025-12-03 12:06:43.029822801 +0000 UTC m=+100.456862571" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.091351 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=45.091330262 podStartE2EDuration="45.091330262s" podCreationTimestamp="2025-12-03 12:05:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:43.091219775 +0000 UTC m=+100.518259545" watchObservedRunningTime="2025-12-03 12:06:43.091330262 +0000 UTC m=+100.518370032" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.110371 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=78.110352136 podStartE2EDuration="1m18.110352136s" podCreationTimestamp="2025-12-03 12:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:43.110198037 +0000 UTC m=+100.537237808" watchObservedRunningTime="2025-12-03 12:06:43.110352136 +0000 UTC m=+100.537391916" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.114385 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.114438 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.114451 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.114472 4591 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.114490 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.217567 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.217605 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.217616 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.217635 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.217654 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.319270 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.319309 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.319318 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.319333 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.319345 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.421359 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.421392 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.421402 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.421416 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.421428 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.523015 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.523044 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.523053 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.523080 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.523092 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.624973 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.625008 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.625017 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.625034 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.625053 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.727037 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.727079 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.727088 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.727098 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.727108 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.829546 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.829628 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.829665 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.829699 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.829722 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.889616 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.889680 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.889712 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.889683 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:43 crc kubenswrapper[4591]: E1203 12:06:43.889928 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:43 crc kubenswrapper[4591]: E1203 12:06:43.890446 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:43 crc kubenswrapper[4591]: E1203 12:06:43.890556 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:43 crc kubenswrapper[4591]: E1203 12:06:43.890666 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.931935 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.931977 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.931987 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.932006 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:43 crc kubenswrapper[4591]: I1203 12:06:43.932020 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:43Z","lastTransitionTime":"2025-12-03T12:06:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.034538 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.034564 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.034574 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.034585 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.034596 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:44Z","lastTransitionTime":"2025-12-03T12:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.136476 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.136511 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.136522 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.136533 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.136542 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:44Z","lastTransitionTime":"2025-12-03T12:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.238682 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.238719 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.238730 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.238748 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.238762 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:44Z","lastTransitionTime":"2025-12-03T12:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.340882 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.340922 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.340933 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.340951 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.340963 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:44Z","lastTransitionTime":"2025-12-03T12:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.442661 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.442693 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.442703 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.442714 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.442723 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:44Z","lastTransitionTime":"2025-12-03T12:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.517517 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.517552 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.517564 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.517577 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.517586 4591 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:06:44Z","lastTransitionTime":"2025-12-03T12:06:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.553404 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82"] Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.553919 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.555845 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.556206 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.556361 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.556438 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.642923 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c9bb81b-c590-4eec-b6f1-772ea641bed8-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.642973 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c9bb81b-c590-4eec-b6f1-772ea641bed8-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.642999 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9c9bb81b-c590-4eec-b6f1-772ea641bed8-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.643196 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9c9bb81b-c590-4eec-b6f1-772ea641bed8-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.643219 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9c9bb81b-c590-4eec-b6f1-772ea641bed8-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.744211 4591 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9c9bb81b-c590-4eec-b6f1-772ea641bed8-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.744273 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9c9bb81b-c590-4eec-b6f1-772ea641bed8-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.744296 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9c9bb81b-c590-4eec-b6f1-772ea641bed8-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.744378 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c9bb81b-c590-4eec-b6f1-772ea641bed8-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.744403 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c9bb81b-c590-4eec-b6f1-772ea641bed8-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.744404 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9c9bb81b-c590-4eec-b6f1-772ea641bed8-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.744368 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9c9bb81b-c590-4eec-b6f1-772ea641bed8-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.745309 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9c9bb81b-c590-4eec-b6f1-772ea641bed8-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.749110 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9c9bb81b-c590-4eec-b6f1-772ea641bed8-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.759053 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c9bb81b-c590-4eec-b6f1-772ea641bed8-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2pb82\" (UID: \"9c9bb81b-c590-4eec-b6f1-772ea641bed8\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.866238 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" Dec 03 12:06:44 crc kubenswrapper[4591]: I1203 12:06:44.891038 4591 scope.go:117] "RemoveContainer" containerID="ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5" Dec 03 12:06:44 crc kubenswrapper[4591]: E1203 12:06:44.891507 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-k4dxv_openshift-ovn-kubernetes(362420fc-42a2-444d-b450-49ff1c0eb5c2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" Dec 03 12:06:45 crc kubenswrapper[4591]: I1203 12:06:45.291708 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" event={"ID":"9c9bb81b-c590-4eec-b6f1-772ea641bed8","Type":"ContainerStarted","Data":"e198b84af36ebe68731a950f012f03f378e735e57651aa66b65bb84bb6f9c33e"} Dec 03 12:06:45 crc kubenswrapper[4591]: I1203 12:06:45.292218 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" event={"ID":"9c9bb81b-c590-4eec-b6f1-772ea641bed8","Type":"ContainerStarted","Data":"5f4c7b0463d5f3a100cc2621890b86a236c92a3b55a5d21a4a2dc78a6256fe15"} Dec 03 12:06:45 crc kubenswrapper[4591]: I1203 12:06:45.304998 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2pb82" podStartSLOduration=81.304976435 podStartE2EDuration="1m21.304976435s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:45.3040313 +0000 UTC m=+102.731071071" watchObservedRunningTime="2025-12-03 12:06:45.304976435 +0000 UTC m=+102.732016205" Dec 03 12:06:45 crc kubenswrapper[4591]: I1203 12:06:45.890945 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:45 crc kubenswrapper[4591]: I1203 12:06:45.891016 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:45 crc kubenswrapper[4591]: I1203 12:06:45.891090 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:45 crc kubenswrapper[4591]: I1203 12:06:45.890957 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:45 crc kubenswrapper[4591]: E1203 12:06:45.891132 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:45 crc kubenswrapper[4591]: E1203 12:06:45.891228 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:45 crc kubenswrapper[4591]: E1203 12:06:45.891314 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:45 crc kubenswrapper[4591]: E1203 12:06:45.891372 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:47 crc kubenswrapper[4591]: I1203 12:06:47.889559 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:47 crc kubenswrapper[4591]: I1203 12:06:47.889559 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:47 crc kubenswrapper[4591]: I1203 12:06:47.889579 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:47 crc kubenswrapper[4591]: E1203 12:06:47.889718 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:47 crc kubenswrapper[4591]: E1203 12:06:47.889759 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:47 crc kubenswrapper[4591]: I1203 12:06:47.889787 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:47 crc kubenswrapper[4591]: E1203 12:06:47.889817 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:47 crc kubenswrapper[4591]: E1203 12:06:47.889895 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:49 crc kubenswrapper[4591]: I1203 12:06:49.890456 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:49 crc kubenswrapper[4591]: I1203 12:06:49.890511 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:49 crc kubenswrapper[4591]: I1203 12:06:49.890463 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:49 crc kubenswrapper[4591]: E1203 12:06:49.890589 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:49 crc kubenswrapper[4591]: E1203 12:06:49.890765 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:49 crc kubenswrapper[4591]: E1203 12:06:49.890882 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:49 crc kubenswrapper[4591]: I1203 12:06:49.891005 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:49 crc kubenswrapper[4591]: E1203 12:06:49.891113 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:51 crc kubenswrapper[4591]: I1203 12:06:51.889498 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:51 crc kubenswrapper[4591]: I1203 12:06:51.889634 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:51 crc kubenswrapper[4591]: I1203 12:06:51.889743 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:51 crc kubenswrapper[4591]: E1203 12:06:51.889785 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:51 crc kubenswrapper[4591]: E1203 12:06:51.889878 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:51 crc kubenswrapper[4591]: E1203 12:06:51.889972 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:51 crc kubenswrapper[4591]: I1203 12:06:51.890642 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:51 crc kubenswrapper[4591]: E1203 12:06:51.890827 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:53 crc kubenswrapper[4591]: I1203 12:06:53.890118 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:53 crc kubenswrapper[4591]: I1203 12:06:53.890157 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:53 crc kubenswrapper[4591]: I1203 12:06:53.890146 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:53 crc kubenswrapper[4591]: E1203 12:06:53.890230 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:53 crc kubenswrapper[4591]: I1203 12:06:53.890120 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:53 crc kubenswrapper[4591]: E1203 12:06:53.890348 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:53 crc kubenswrapper[4591]: E1203 12:06:53.890474 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:53 crc kubenswrapper[4591]: E1203 12:06:53.890555 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:55 crc kubenswrapper[4591]: I1203 12:06:55.890179 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:55 crc kubenswrapper[4591]: I1203 12:06:55.890179 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:55 crc kubenswrapper[4591]: E1203 12:06:55.891045 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:55 crc kubenswrapper[4591]: I1203 12:06:55.890316 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:55 crc kubenswrapper[4591]: E1203 12:06:55.891155 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:55 crc kubenswrapper[4591]: I1203 12:06:55.890346 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:55 crc kubenswrapper[4591]: E1203 12:06:55.891360 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:55 crc kubenswrapper[4591]: E1203 12:06:55.891263 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.323752 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/1.log" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.324674 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/0.log" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.324779 4591 generic.go:334] "Generic (PLEG): container finished" podID="19d5b224-0f8a-49a3-84f4-f2c0ef74fda4" containerID="d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba" exitCode=1 Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.324853 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerDied","Data":"d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba"} Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.324927 4591 scope.go:117] "RemoveContainer" containerID="dddd419d5ebe31bbc45d376a91393bd6e7c65c7fd53dc67638ee3af3ae535f2f" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.325292 4591 scope.go:117] "RemoveContainer" containerID="d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba" Dec 03 12:06:57 crc kubenswrapper[4591]: E1203 12:06:57.325493 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-2qprr_openshift-multus(19d5b224-0f8a-49a3-84f4-f2c0ef74fda4)\"" pod="openshift-multus/multus-2qprr" podUID="19d5b224-0f8a-49a3-84f4-f2c0ef74fda4" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.889800 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:57 crc kubenswrapper[4591]: E1203 12:06:57.889936 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.890275 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.890306 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:57 crc kubenswrapper[4591]: I1203 12:06:57.890356 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:57 crc kubenswrapper[4591]: E1203 12:06:57.890493 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:06:57 crc kubenswrapper[4591]: E1203 12:06:57.890611 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:57 crc kubenswrapper[4591]: E1203 12:06:57.890709 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:58 crc kubenswrapper[4591]: I1203 12:06:58.329192 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/1.log" Dec 03 12:06:58 crc kubenswrapper[4591]: I1203 12:06:58.890647 4591 scope.go:117] "RemoveContainer" containerID="ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5" Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.334274 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/3.log" Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.336945 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerStarted","Data":"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e"} Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.337345 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.360809 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podStartSLOduration=95.360792414 podStartE2EDuration="1m35.360792414s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:06:59.359994437 +0000 UTC m=+116.787034207" watchObservedRunningTime="2025-12-03 12:06:59.360792414 +0000 UTC m=+116.787832185" Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.574163 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5drvq"] Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.574416 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:06:59 crc kubenswrapper[4591]: E1203 12:06:59.574535 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.889958 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.890039 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:06:59 crc kubenswrapper[4591]: E1203 12:06:59.890094 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:06:59 crc kubenswrapper[4591]: I1203 12:06:59.890049 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:06:59 crc kubenswrapper[4591]: E1203 12:06:59.890214 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:06:59 crc kubenswrapper[4591]: E1203 12:06:59.890351 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:00 crc kubenswrapper[4591]: I1203 12:07:00.889487 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:00 crc kubenswrapper[4591]: E1203 12:07:00.889824 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:01 crc kubenswrapper[4591]: I1203 12:07:01.890210 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:01 crc kubenswrapper[4591]: I1203 12:07:01.890297 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:01 crc kubenswrapper[4591]: I1203 12:07:01.890309 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:01 crc kubenswrapper[4591]: E1203 12:07:01.890405 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:01 crc kubenswrapper[4591]: E1203 12:07:01.890542 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:01 crc kubenswrapper[4591]: E1203 12:07:01.890631 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:02 crc kubenswrapper[4591]: I1203 12:07:02.890189 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:02 crc kubenswrapper[4591]: E1203 12:07:02.890953 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:02 crc kubenswrapper[4591]: E1203 12:07:02.924279 4591 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 12:07:02 crc kubenswrapper[4591]: E1203 12:07:02.941636 4591 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:07:03 crc kubenswrapper[4591]: I1203 12:07:03.890560 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:03 crc kubenswrapper[4591]: I1203 12:07:03.890574 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:03 crc kubenswrapper[4591]: E1203 12:07:03.890706 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:03 crc kubenswrapper[4591]: I1203 12:07:03.890561 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:03 crc kubenswrapper[4591]: E1203 12:07:03.891104 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:03 crc kubenswrapper[4591]: E1203 12:07:03.891140 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:03 crc kubenswrapper[4591]: I1203 12:07:03.962914 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:07:04 crc kubenswrapper[4591]: I1203 12:07:04.889808 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:04 crc kubenswrapper[4591]: E1203 12:07:04.889982 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:05 crc kubenswrapper[4591]: I1203 12:07:05.890261 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:05 crc kubenswrapper[4591]: I1203 12:07:05.890290 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:05 crc kubenswrapper[4591]: E1203 12:07:05.890369 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:05 crc kubenswrapper[4591]: I1203 12:07:05.890261 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:05 crc kubenswrapper[4591]: E1203 12:07:05.890449 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:05 crc kubenswrapper[4591]: E1203 12:07:05.890503 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:06 crc kubenswrapper[4591]: I1203 12:07:06.889733 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:06 crc kubenswrapper[4591]: E1203 12:07:06.889914 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:07 crc kubenswrapper[4591]: I1203 12:07:07.890335 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:07 crc kubenswrapper[4591]: I1203 12:07:07.890383 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:07 crc kubenswrapper[4591]: I1203 12:07:07.890393 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:07 crc kubenswrapper[4591]: E1203 12:07:07.890466 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:07 crc kubenswrapper[4591]: E1203 12:07:07.890666 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:07 crc kubenswrapper[4591]: E1203 12:07:07.890831 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:07 crc kubenswrapper[4591]: E1203 12:07:07.943310 4591 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:07:08 crc kubenswrapper[4591]: I1203 12:07:08.889604 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:08 crc kubenswrapper[4591]: E1203 12:07:08.889738 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:09 crc kubenswrapper[4591]: I1203 12:07:09.889943 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:09 crc kubenswrapper[4591]: I1203 12:07:09.889943 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:09 crc kubenswrapper[4591]: E1203 12:07:09.890486 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:09 crc kubenswrapper[4591]: I1203 12:07:09.889995 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:09 crc kubenswrapper[4591]: E1203 12:07:09.890524 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:09 crc kubenswrapper[4591]: E1203 12:07:09.890781 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:10 crc kubenswrapper[4591]: I1203 12:07:10.890359 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:10 crc kubenswrapper[4591]: E1203 12:07:10.890490 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:11 crc kubenswrapper[4591]: I1203 12:07:11.889493 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:11 crc kubenswrapper[4591]: I1203 12:07:11.889567 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:11 crc kubenswrapper[4591]: E1203 12:07:11.889633 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:11 crc kubenswrapper[4591]: I1203 12:07:11.889649 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:11 crc kubenswrapper[4591]: E1203 12:07:11.889843 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:11 crc kubenswrapper[4591]: E1203 12:07:11.890024 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:11 crc kubenswrapper[4591]: I1203 12:07:11.890307 4591 scope.go:117] "RemoveContainer" containerID="d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba" Dec 03 12:07:12 crc kubenswrapper[4591]: I1203 12:07:12.375619 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/1.log" Dec 03 12:07:12 crc kubenswrapper[4591]: I1203 12:07:12.375919 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerStarted","Data":"c8b78dd322bb74ab016f9c66c2bbad1989573a75101d93eda813a266b14a5b2e"} Dec 03 12:07:12 crc kubenswrapper[4591]: I1203 12:07:12.890209 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:12 crc kubenswrapper[4591]: E1203 12:07:12.891680 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:12 crc kubenswrapper[4591]: E1203 12:07:12.944353 4591 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:07:13 crc kubenswrapper[4591]: I1203 12:07:13.889727 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:13 crc kubenswrapper[4591]: I1203 12:07:13.889763 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:13 crc kubenswrapper[4591]: I1203 12:07:13.889785 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:13 crc kubenswrapper[4591]: E1203 12:07:13.889916 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:13 crc kubenswrapper[4591]: E1203 12:07:13.890051 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:13 crc kubenswrapper[4591]: E1203 12:07:13.890125 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:14 crc kubenswrapper[4591]: I1203 12:07:14.889710 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:14 crc kubenswrapper[4591]: E1203 12:07:14.889848 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:15 crc kubenswrapper[4591]: I1203 12:07:15.889787 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:15 crc kubenswrapper[4591]: I1203 12:07:15.889821 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:15 crc kubenswrapper[4591]: I1203 12:07:15.889787 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:15 crc kubenswrapper[4591]: E1203 12:07:15.889924 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:15 crc kubenswrapper[4591]: E1203 12:07:15.889991 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:15 crc kubenswrapper[4591]: E1203 12:07:15.890046 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:16 crc kubenswrapper[4591]: I1203 12:07:16.890294 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:16 crc kubenswrapper[4591]: E1203 12:07:16.890420 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5drvq" podUID="8cd44649-dee5-4a99-8123-059f30fd0c1b" Dec 03 12:07:17 crc kubenswrapper[4591]: I1203 12:07:17.889846 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:17 crc kubenswrapper[4591]: I1203 12:07:17.889851 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:17 crc kubenswrapper[4591]: I1203 12:07:17.889866 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:17 crc kubenswrapper[4591]: E1203 12:07:17.890187 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:07:17 crc kubenswrapper[4591]: E1203 12:07:17.890260 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:07:17 crc kubenswrapper[4591]: E1203 12:07:17.889981 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:07:18 crc kubenswrapper[4591]: I1203 12:07:18.889989 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:18 crc kubenswrapper[4591]: I1203 12:07:18.892201 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 12:07:18 crc kubenswrapper[4591]: I1203 12:07:18.893979 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 12:07:19 crc kubenswrapper[4591]: I1203 12:07:19.890508 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:19 crc kubenswrapper[4591]: I1203 12:07:19.890508 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:19 crc kubenswrapper[4591]: I1203 12:07:19.890531 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:19 crc kubenswrapper[4591]: I1203 12:07:19.892093 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 12:07:19 crc kubenswrapper[4591]: I1203 12:07:19.892477 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 12:07:19 crc kubenswrapper[4591]: I1203 12:07:19.892648 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 12:07:19 crc kubenswrapper[4591]: I1203 12:07:19.893876 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.248937 4591 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.275146 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78vjm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.275568 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.276612 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xg6n2"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.276929 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.277082 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.277292 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.280977 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tr2zc"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.281284 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.282331 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.282922 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.283231 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.283801 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.284022 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.284346 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.285566 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.285913 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-gdw9n"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.286153 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gdw9n" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.286342 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.286550 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.286787 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:25 crc kubenswrapper[4591]: W1203 12:07:25.293436 4591 reflector.go:561] object-"openshift-console-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 03 12:07:25 crc kubenswrapper[4591]: E1203 12:07:25.293472 4591 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.293544 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.293552 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.293819 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.293946 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294048 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xc9nk"] Dec 03 12:07:25 crc 
kubenswrapper[4591]: W1203 12:07:25.294194 4591 reflector.go:561] object-"openshift-console-operator"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 03 12:07:25 crc kubenswrapper[4591]: E1203 12:07:25.294217 4591 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294346 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294572 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294768 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294837 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-dchgs"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.295254 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294845 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294868 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294900 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294930 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.294960 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.295335 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.295419 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.295444 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.296279 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.296322 4591 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.296281 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.296381 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.296730 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.305659 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.309341 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.310092 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.311579 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.324340 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.324470 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.324938 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.325213 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.325840 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.325981 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.326048 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.326272 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.326366 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.326486 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.326872 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.327265 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.327841 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.328650 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mc4zp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.329131 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.329749 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.329800 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.329992 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.330205 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.331551 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kdctl"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.332314 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.334665 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.335032 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.335155 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.335421 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.335536 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.335488 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.335930 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.336091 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.336353 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.336526 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.336631 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.336868 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.336897 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.336995 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337010 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pjthj"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337091 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337229 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337289 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337312 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337431 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337534 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.337829 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.338304 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-cnc8r"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.338734 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.338778 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.338871 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339018 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339127 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339137 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339151 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339234 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339245 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339321 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339393 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339480 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339505 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339637 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339711 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339780 4591 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339852 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.339929 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340001 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340099 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340180 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340250 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340322 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340425 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340509 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340532 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-czcvh"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340581 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340651 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340726 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340798 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340862 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-64znj"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.340870 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.341242 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.341390 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8n547"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.341408 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.341975 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.342669 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.342769 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.342869 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.342946 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343096 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343207 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343331 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343439 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343441 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343503 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343601 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343658 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343741 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343818 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343893 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.343962 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.344025 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.344153 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.344197 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.344257 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.344325 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.344660 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.344961 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7qzlf"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.345047 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.345342 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.346404 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.346856 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8d6cq"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.346975 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.361000 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.364175 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.364302 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.369551 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.370374 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.370789 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.370941 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-849sq"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.371128 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.371369 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.371440 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.371635 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.374217 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.374697 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.375084 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.375256 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.375830 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.376301 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.377550 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.377678 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dr86v"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.378151 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.378760 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.379235 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.379575 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.380452 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xg6n2"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.381552 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.382870 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78vjm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.383752 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xc9nk"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.389747 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.390364 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.393367 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gdw9n"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.394531 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.394822 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395253 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ded20790-338d-408a-8087-daf3a7906285-auth-proxy-config\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395282 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-dir\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395303 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/49493d9d-b35e-4a1e-8ecb-730ec9793700-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395335 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7350e5d8-3a9e-4c57-9e86-910646ee95c9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395356 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-config\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395383 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-oauth-config\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395401 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtpvt\" (UniqueName: \"kubernetes.io/projected/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-kube-api-access-jtpvt\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395435 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78feea03-d859-44c3-8832-a765ac762e2c-serving-cert\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc 
kubenswrapper[4591]: I1203 12:07:25.395451 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-config\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395467 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395542 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzfdq\" (UniqueName: \"kubernetes.io/projected/7350e5d8-3a9e-4c57-9e86-910646ee95c9-kube-api-access-fzfdq\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395573 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7350e5d8-3a9e-4c57-9e86-910646ee95c9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395593 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2e0ab3d0-1659-4b4f-878e-97fa8e1940e2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-9vfpm\" (UID: \"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395596 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mc4zp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395613 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ab6e3a66-178d-4d51-b9f5-0ed128342a20-profile-collector-cert\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.395629 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhkl2\" (UniqueName: \"kubernetes.io/projected/a76a4f2a-891a-4931-b1a6-49208d5b2c01-kube-api-access-zhkl2\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396139 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" 
(UniqueName: \"kubernetes.io/secret/49493d9d-b35e-4a1e-8ecb-730ec9793700-metrics-tls\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396198 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvzh2\" (UniqueName: \"kubernetes.io/projected/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-kube-api-access-kvzh2\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396227 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-252ng\" (UniqueName: \"kubernetes.io/projected/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-kube-api-access-252ng\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396246 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ab6e3a66-178d-4d51-b9f5-0ed128342a20-srv-cert\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396272 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396292 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396314 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ded20790-338d-408a-8087-daf3a7906285-config\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396332 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt5rk\" (UniqueName: \"kubernetes.io/projected/540db196-6650-4e0d-ad6f-3b97bbc003c7-kube-api-access-dt5rk\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396355 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-serving-cert\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396392 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gspjw\" (UniqueName: \"kubernetes.io/projected/ded20790-338d-408a-8087-daf3a7906285-kube-api-access-gspjw\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396410 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-serving-cert\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396425 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396447 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn2jm\" (UniqueName: \"kubernetes.io/projected/830deeec-973c-4826-9357-b341f6a4b399-kube-api-access-zn2jm\") pod \"dns-operator-744455d44c-64znj\" (UID: \"830deeec-973c-4826-9357-b341f6a4b399\") " pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396468 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-policies\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396486 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xbbm\" (UniqueName: \"kubernetes.io/projected/49493d9d-b35e-4a1e-8ecb-730ec9793700-kube-api-access-4xbbm\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396519 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzg7z\" (UniqueName: \"kubernetes.io/projected/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-kube-api-access-tzg7z\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396535 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c70c478-feff-4a08-9522-96297d92ba2b-serving-cert\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396566 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396584 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/39c241dd-677f-4270-b941-a71c3fab94d9-audit-dir\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396599 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrvwl\" (UniqueName: \"kubernetes.io/projected/78feea03-d859-44c3-8832-a765ac762e2c-kube-api-access-zrvwl\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396617 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396638 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-serving-cert\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396655 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ded20790-338d-408a-8087-daf3a7906285-machine-approver-tls\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396673 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-config\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396688 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-client-ca\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396705 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396728 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/540db196-6650-4e0d-ad6f-3b97bbc003c7-serving-cert\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396746 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b2cf8937-46f9-42a3-965e-1c971c1a544b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396760 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/671f4fe9-fef4-40d1-9379-6d90d29320f4-proxy-tls\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396774 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-config\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396793 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-service-ca\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396810 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-oauth-serving-cert\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396824 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396838 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396853 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-config\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396870 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396891 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b655df3b-049f-4f7e-9f17-9d84bb008c0a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8d6cq\" (UID: \"b655df3b-049f-4f7e-9f17-9d84bb008c0a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396906 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/671f4fe9-fef4-40d1-9379-6d90d29320f4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396924 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-audit-policies\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396990 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b2cf8937-46f9-42a3-965e-1c971c1a544b-srv-cert\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.396891 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kdctl"] Dec 03 12:07:25 crc kubenswrapper[4591]: 
I1203 12:07:25.397034 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pct6m\" (UniqueName: \"kubernetes.io/projected/ab6e3a66-178d-4d51-b9f5-0ed128342a20-kube-api-access-pct6m\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397079 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7c70c478-feff-4a08-9522-96297d92ba2b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397100 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/830deeec-973c-4826-9357-b341f6a4b399-metrics-tls\") pod \"dns-operator-744455d44c-64znj\" (UID: \"830deeec-973c-4826-9357-b341f6a4b399\") " pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397114 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397132 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-etcd-ca\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397197 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397211 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-images\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397228 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397246 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mjw4\" (UniqueName: \"kubernetes.io/projected/39c241dd-677f-4270-b941-a71c3fab94d9-kube-api-access-4mjw4\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397278 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397303 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktj6p\" (UniqueName: \"kubernetes.io/projected/2e0ab3d0-1659-4b4f-878e-97fa8e1940e2-kube-api-access-ktj6p\") pod \"package-server-manager-789f6589d5-9vfpm\" (UID: \"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397318 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-729rw\" (UniqueName: \"kubernetes.io/projected/671f4fe9-fef4-40d1-9379-6d90d29320f4-kube-api-access-729rw\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397334 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft92c\" (UniqueName: \"kubernetes.io/projected/7c70c478-feff-4a08-9522-96297d92ba2b-kube-api-access-ft92c\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397348 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-etcd-client\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397364 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79jcm\" (UniqueName: \"kubernetes.io/projected/b2cf8937-46f9-42a3-965e-1c971c1a544b-kube-api-access-79jcm\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397380 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9fzc\" (UniqueName: \"kubernetes.io/projected/b655df3b-049f-4f7e-9f17-9d84bb008c0a-kube-api-access-w9fzc\") pod \"multus-admission-controller-857f4d67dd-8d6cq\" (UID: \"b655df3b-049f-4f7e-9f17-9d84bb008c0a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397394 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397410 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397424 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/49493d9d-b35e-4a1e-8ecb-730ec9793700-trusted-ca\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397440 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397455 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-serving-cert\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397469 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-encryption-config\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397484 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-etcd-service-ca\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397508 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-trusted-ca-bundle\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397557 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-service-ca-bundle\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397586 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397609 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7350e5d8-3a9e-4c57-9e86-910646ee95c9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397630 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/78feea03-d859-44c3-8832-a765ac762e2c-etcd-client\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397660 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-config\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.397692 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-client-ca\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.399139 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tr2zc"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.400056 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.402282 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.403133 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.404166 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.406488 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.409711 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 
12:07:25.409745 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8n547"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.410670 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pjthj"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.411393 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-64znj"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.414734 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-849sq"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.414761 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.416375 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cnc8r"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.417488 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.418152 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.418488 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.419636 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-bvpwm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.420624 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.420711 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.421441 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dr86v"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.425709 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.428882 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.433407 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.434703 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7qzlf"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.436099 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.436576 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8d6cq"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.437221 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.437484 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-czcvh"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.438432 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.439487 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-868m7"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.440272 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.440378 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q2xzp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.441230 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.441680 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-868m7"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.442283 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q2xzp"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.457002 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.477602 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.485172 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-fp692"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.486279 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-fp692" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.494354 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-fp692"] Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.496818 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498222 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-oauth-config\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498260 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtpvt\" (UniqueName: \"kubernetes.io/projected/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-kube-api-access-jtpvt\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498284 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498326 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78feea03-d859-44c3-8832-a765ac762e2c-serving-cert\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498355 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-config\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498382 4591 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-fzfdq\" (UniqueName: \"kubernetes.io/projected/7350e5d8-3a9e-4c57-9e86-910646ee95c9-kube-api-access-fzfdq\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498407 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2e0ab3d0-1659-4b4f-878e-97fa8e1940e2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-9vfpm\" (UID: \"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498433 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7350e5d8-3a9e-4c57-9e86-910646ee95c9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498456 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/49493d9d-b35e-4a1e-8ecb-730ec9793700-metrics-tls\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498483 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ab6e3a66-178d-4d51-b9f5-0ed128342a20-profile-collector-cert\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498520 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhkl2\" (UniqueName: \"kubernetes.io/projected/a76a4f2a-891a-4931-b1a6-49208d5b2c01-kube-api-access-zhkl2\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498545 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvzh2\" (UniqueName: \"kubernetes.io/projected/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-kube-api-access-kvzh2\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498565 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498589 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-252ng\" (UniqueName: \"kubernetes.io/projected/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-kube-api-access-252ng\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498610 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ab6e3a66-178d-4d51-b9f5-0ed128342a20-srv-cert\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498629 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498658 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ded20790-338d-408a-8087-daf3a7906285-config\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498684 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt5rk\" (UniqueName: \"kubernetes.io/projected/540db196-6650-4e0d-ad6f-3b97bbc003c7-kube-api-access-dt5rk\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498705 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-serving-cert\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498726 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gspjw\" (UniqueName: \"kubernetes.io/projected/ded20790-338d-408a-8087-daf3a7906285-kube-api-access-gspjw\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498745 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-serving-cert\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498765 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-policies\") pod 
\"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498791 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498809 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn2jm\" (UniqueName: \"kubernetes.io/projected/830deeec-973c-4826-9357-b341f6a4b399-kube-api-access-zn2jm\") pod \"dns-operator-744455d44c-64znj\" (UID: \"830deeec-973c-4826-9357-b341f6a4b399\") " pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498827 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzg7z\" (UniqueName: \"kubernetes.io/projected/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-kube-api-access-tzg7z\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498850 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c70c478-feff-4a08-9522-96297d92ba2b-serving-cert\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498888 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498905 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/39c241dd-677f-4270-b941-a71c3fab94d9-audit-dir\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498925 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xbbm\" (UniqueName: \"kubernetes.io/projected/49493d9d-b35e-4a1e-8ecb-730ec9793700-kube-api-access-4xbbm\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498925 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 
12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498945 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrvwl\" (UniqueName: \"kubernetes.io/projected/78feea03-d859-44c3-8832-a765ac762e2c-kube-api-access-zrvwl\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498963 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.498984 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-config\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499003 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-client-ca\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499024 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-serving-cert\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499042 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ded20790-338d-408a-8087-daf3a7906285-machine-approver-tls\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499078 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/540db196-6650-4e0d-ad6f-3b97bbc003c7-serving-cert\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499098 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499118 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-config\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499145 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b2cf8937-46f9-42a3-965e-1c971c1a544b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499164 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/671f4fe9-fef4-40d1-9379-6d90d29320f4-proxy-tls\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499185 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-service-ca\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499212 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-oauth-serving-cert\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499233 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499256 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499281 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-config\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499630 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 
12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499666 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/671f4fe9-fef4-40d1-9379-6d90d29320f4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499670 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-config\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499691 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b655df3b-049f-4f7e-9f17-9d84bb008c0a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8d6cq\" (UID: \"b655df3b-049f-4f7e-9f17-9d84bb008c0a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499714 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-audit-policies\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499769 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/830deeec-973c-4826-9357-b341f6a4b399-metrics-tls\") pod \"dns-operator-744455d44c-64znj\" (UID: \"830deeec-973c-4826-9357-b341f6a4b399\") " pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499813 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-etcd-ca\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499838 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b2cf8937-46f9-42a3-965e-1c971c1a544b-srv-cert\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499866 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pct6m\" (UniqueName: \"kubernetes.io/projected/ab6e3a66-178d-4d51-b9f5-0ed128342a20-kube-api-access-pct6m\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499897 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/7c70c478-feff-4a08-9522-96297d92ba2b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499919 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-images\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499944 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.499977 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mjw4\" (UniqueName: \"kubernetes.io/projected/39c241dd-677f-4270-b941-a71c3fab94d9-kube-api-access-4mjw4\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500054 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500113 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktj6p\" (UniqueName: \"kubernetes.io/projected/2e0ab3d0-1659-4b4f-878e-97fa8e1940e2-kube-api-access-ktj6p\") pod \"package-server-manager-789f6589d5-9vfpm\" (UID: \"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500162 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79jcm\" (UniqueName: \"kubernetes.io/projected/b2cf8937-46f9-42a3-965e-1c971c1a544b-kube-api-access-79jcm\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500183 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-729rw\" (UniqueName: \"kubernetes.io/projected/671f4fe9-fef4-40d1-9379-6d90d29320f4-kube-api-access-729rw\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500230 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft92c\" (UniqueName: \"kubernetes.io/projected/7c70c478-feff-4a08-9522-96297d92ba2b-kube-api-access-ft92c\") pod 
\"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500254 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-etcd-client\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500280 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9fzc\" (UniqueName: \"kubernetes.io/projected/b655df3b-049f-4f7e-9f17-9d84bb008c0a-kube-api-access-w9fzc\") pod \"multus-admission-controller-857f4d67dd-8d6cq\" (UID: \"b655df3b-049f-4f7e-9f17-9d84bb008c0a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500304 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500329 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500354 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-serving-cert\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500379 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500404 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/49493d9d-b35e-4a1e-8ecb-730ec9793700-trusted-ca\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500426 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-etcd-service-ca\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500447 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-trusted-ca-bundle\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500450 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-audit-policies\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500467 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-encryption-config\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500509 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-service-ca-bundle\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500529 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500550 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7350e5d8-3a9e-4c57-9e86-910646ee95c9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500572 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/78feea03-d859-44c3-8832-a765ac762e2c-etcd-client\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500595 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-config\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500664 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-client-ca\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500687 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/49493d9d-b35e-4a1e-8ecb-730ec9793700-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500709 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ded20790-338d-408a-8087-daf3a7906285-auth-proxy-config\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500727 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-dir\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500763 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7350e5d8-3a9e-4c57-9e86-910646ee95c9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.500792 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-config\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.501771 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7350e5d8-3a9e-4c57-9e86-910646ee95c9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.502742 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.502894 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/39c241dd-677f-4270-b941-a71c3fab94d9-audit-dir\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.504100 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-dir\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.504237 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/39c241dd-677f-4270-b941-a71c3fab94d9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.504641 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-client-ca\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.504721 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-service-ca-bundle\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.504993 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ded20790-338d-408a-8087-daf3a7906285-config\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.505136 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-client-ca\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.505091 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-config\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.505014 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ded20790-338d-408a-8087-daf3a7906285-auth-proxy-config\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.505670 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.505939 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/540db196-6650-4e0d-ad6f-3b97bbc003c7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.506295 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-serving-cert\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.506672 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-etcd-ca\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.506829 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-config\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507042 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7c70c478-feff-4a08-9522-96297d92ba2b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507157 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ded20790-338d-408a-8087-daf3a7906285-machine-approver-tls\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507197 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-encryption-config\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507272 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc 
kubenswrapper[4591]: I1203 12:07:25.507337 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/78feea03-d859-44c3-8832-a765ac762e2c-etcd-service-ca\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507450 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78feea03-d859-44c3-8832-a765ac762e2c-serving-cert\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507744 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507835 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-policies\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507863 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-config\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.507862 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/671f4fe9-fef4-40d1-9379-6d90d29320f4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.508550 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.508716 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-serving-cert\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.508777 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.509013 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.509124 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-serving-cert\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.509164 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.509367 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/78feea03-d859-44c3-8832-a765ac762e2c-etcd-client\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.510153 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.510361 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7350e5d8-3a9e-4c57-9e86-910646ee95c9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.510469 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/540db196-6650-4e0d-ad6f-3b97bbc003c7-serving-cert\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.510595 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-tr2zc\" 
(UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.510931 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.511538 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.517189 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.523837 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/39c241dd-677f-4270-b941-a71c3fab94d9-etcd-client\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.536926 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.556820 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.576674 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.596687 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.617280 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.623748 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ab6e3a66-178d-4d51-b9f5-0ed128342a20-profile-collector-cert\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.625741 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b2cf8937-46f9-42a3-965e-1c971c1a544b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.637432 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 12:07:25 crc 
kubenswrapper[4591]: I1203 12:07:25.658512 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.666762 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b2cf8937-46f9-42a3-965e-1c971c1a544b-srv-cert\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.696953 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.716623 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.737220 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.757000 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.766806 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c70c478-feff-4a08-9522-96297d92ba2b-serving-cert\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.777996 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.797647 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.817437 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.837018 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.846884 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-serving-cert\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.857197 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.877573 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.897218 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.904091 4591 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-config\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.916877 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.922397 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-oauth-config\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.936872 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.944599 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-service-ca\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.965892 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.968507 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-trusted-ca-bundle\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.976603 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.984350 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-oauth-serving-cert\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:25 crc kubenswrapper[4591]: I1203 12:07:25.997816 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.017502 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.037512 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.056645 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.064734 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/830deeec-973c-4826-9357-b341f6a4b399-metrics-tls\") pod \"dns-operator-744455d44c-64znj\" (UID: \"830deeec-973c-4826-9357-b341f6a4b399\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.077299 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.097325 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.106010 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/671f4fe9-fef4-40d1-9379-6d90d29320f4-proxy-tls\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.117527 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.137033 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.146817 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.156856 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.164665 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-config\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.177445 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.184058 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-images\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.196719 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.217475 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.237607 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.256804 4591 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.277405 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.297343 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.317411 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.336793 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.356558 4591 request.go:700] Waited for 1.012148573s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/secrets?fieldSelector=metadata.name%3Dmetrics-tls&limit=500&resourceVersion=0 Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.357566 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.364657 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/49493d9d-b35e-4a1e-8ecb-730ec9793700-metrics-tls\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.382023 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.386084 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/49493d9d-b35e-4a1e-8ecb-730ec9793700-trusted-ca\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.396589 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.416510 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.438461 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.457512 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.477498 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.484465 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/ab6e3a66-178d-4d51-b9f5-0ed128342a20-srv-cert\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.497004 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.503369 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2e0ab3d0-1659-4b4f-878e-97fa8e1940e2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-9vfpm\" (UID: \"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:26 crc kubenswrapper[4591]: E1203 12:07:26.505141 4591 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Dec 03 12:07:26 crc kubenswrapper[4591]: E1203 12:07:26.505206 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b655df3b-049f-4f7e-9f17-9d84bb008c0a-webhook-certs podName:b655df3b-049f-4f7e-9f17-9d84bb008c0a nodeName:}" failed. No retries permitted until 2025-12-03 12:07:27.005190621 +0000 UTC m=+144.432230392 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b655df3b-049f-4f7e-9f17-9d84bb008c0a-webhook-certs") pod "multus-admission-controller-857f4d67dd-8d6cq" (UID: "b655df3b-049f-4f7e-9f17-9d84bb008c0a") : failed to sync secret cache: timed out waiting for the condition Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.537562 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.557159 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.577661 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.596767 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.621412 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.637356 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.657812 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.677457 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.697447 4591 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.716868 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.736826 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.757131 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.777260 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.796686 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.817292 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.836586 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.857013 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.877548 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.897273 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.917802 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.937133 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.957429 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.977158 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 12:07:26 crc kubenswrapper[4591]: I1203 12:07:26.996800 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.017124 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.019186 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b655df3b-049f-4f7e-9f17-9d84bb008c0a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8d6cq\" (UID: \"b655df3b-049f-4f7e-9f17-9d84bb008c0a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.021976 
4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b655df3b-049f-4f7e-9f17-9d84bb008c0a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8d6cq\" (UID: \"b655df3b-049f-4f7e-9f17-9d84bb008c0a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.036521 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.056929 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.077313 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.097218 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.117824 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.137564 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.156910 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.177125 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.197304 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.217782 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.236950 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.257019 4591 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.277440 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.297656 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.316743 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.348439 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtpvt\" (UniqueName: \"kubernetes.io/projected/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-kube-api-access-jtpvt\") pod \"console-f9d7485db-cnc8r\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:27 crc 
kubenswrapper[4591]: I1203 12:07:27.369633 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzfdq\" (UniqueName: \"kubernetes.io/projected/7350e5d8-3a9e-4c57-9e86-910646ee95c9-kube-api-access-fzfdq\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.376055 4591 request.go:700] Waited for 1.87538118s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa/token Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.388371 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvzh2\" (UniqueName: \"kubernetes.io/projected/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-kube-api-access-kvzh2\") pod \"controller-manager-879f6c89f-78vjm\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.393154 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.408120 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gspjw\" (UniqueName: \"kubernetes.io/projected/ded20790-338d-408a-8087-daf3a7906285-kube-api-access-gspjw\") pod \"machine-approver-56656f9798-trxnq\" (UID: \"ded20790-338d-408a-8087-daf3a7906285\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.426865 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.429705 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhkl2\" (UniqueName: \"kubernetes.io/projected/a76a4f2a-891a-4931-b1a6-49208d5b2c01-kube-api-access-zhkl2\") pod \"oauth-openshift-558db77b4-tr2zc\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:27 crc kubenswrapper[4591]: W1203 12:07:27.438170 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podded20790_338d_408a_8087_daf3a7906285.slice/crio-fc6a93aa538abd6d46e171f185626667d1e542820a1fd6daa083f96451efba96 WatchSource:0}: Error finding container fc6a93aa538abd6d46e171f185626667d1e542820a1fd6daa083f96451efba96: Status 404 returned error can't find the container with id fc6a93aa538abd6d46e171f185626667d1e542820a1fd6daa083f96451efba96 Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.452409 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-252ng\" (UniqueName: \"kubernetes.io/projected/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-kube-api-access-252ng\") pod \"route-controller-manager-6576b87f9c-r9rbv\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.471116 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mjw4\" (UniqueName: \"kubernetes.io/projected/39c241dd-677f-4270-b941-a71c3fab94d9-kube-api-access-4mjw4\") pod \"apiserver-7bbb656c7d-6fvx9\" (UID: \"39c241dd-677f-4270-b941-a71c3fab94d9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.481739 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.490546 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-729rw\" (UniqueName: \"kubernetes.io/projected/671f4fe9-fef4-40d1-9379-6d90d29320f4-kube-api-access-729rw\") pod \"machine-config-controller-84d6567774-8n547\" (UID: \"671f4fe9-fef4-40d1-9379-6d90d29320f4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.491189 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.511972 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktj6p\" (UniqueName: \"kubernetes.io/projected/2e0ab3d0-1659-4b4f-878e-97fa8e1940e2-kube-api-access-ktj6p\") pod \"package-server-manager-789f6589d5-9vfpm\" (UID: \"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.531050 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79jcm\" (UniqueName: \"kubernetes.io/projected/b2cf8937-46f9-42a3-965e-1c971c1a544b-kube-api-access-79jcm\") pod \"olm-operator-6b444d44fb-m7nk6\" (UID: \"b2cf8937-46f9-42a3-965e-1c971c1a544b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.555344 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt5rk\" (UniqueName: \"kubernetes.io/projected/540db196-6650-4e0d-ad6f-3b97bbc003c7-kube-api-access-dt5rk\") pod \"authentication-operator-69f744f599-mc4zp\" (UID: \"540db196-6650-4e0d-ad6f-3b97bbc003c7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.565972 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.576171 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.576506 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft92c\" (UniqueName: \"kubernetes.io/projected/7c70c478-feff-4a08-9522-96297d92ba2b-kube-api-access-ft92c\") pod \"openshift-config-operator-7777fb866f-pjthj\" (UID: \"7c70c478-feff-4a08-9522-96297d92ba2b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.577094 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78vjm"] Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.591123 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.592499 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9fzc\" (UniqueName: \"kubernetes.io/projected/b655df3b-049f-4f7e-9f17-9d84bb008c0a-kube-api-access-w9fzc\") pod \"multus-admission-controller-857f4d67dd-8d6cq\" (UID: \"b655df3b-049f-4f7e-9f17-9d84bb008c0a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.598477 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.617628 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xbbm\" (UniqueName: \"kubernetes.io/projected/49493d9d-b35e-4a1e-8ecb-730ec9793700-kube-api-access-4xbbm\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.627978 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tr2zc"] Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.630671 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pct6m\" (UniqueName: \"kubernetes.io/projected/ab6e3a66-178d-4d51-b9f5-0ed128342a20-kube-api-access-pct6m\") pod \"catalog-operator-68c6474976-n46k2\" (UID: \"ab6e3a66-178d-4d51-b9f5-0ed128342a20\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.639662 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.652735 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrvwl\" (UniqueName: \"kubernetes.io/projected/78feea03-d859-44c3-8832-a765ac762e2c-kube-api-access-zrvwl\") pod \"etcd-operator-b45778765-xc9nk\" (UID: \"78feea03-d859-44c3-8832-a765ac762e2c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.659241 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" Dec 03 12:07:27 crc kubenswrapper[4591]: W1203 12:07:27.660894 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda76a4f2a_891a_4931_b1a6_49208d5b2c01.slice/crio-1c589abb94ff7c66d9ea476662e00cfd778335397f8383cdabc1d99377b47ba6 WatchSource:0}: Error finding container 1c589abb94ff7c66d9ea476662e00cfd778335397f8383cdabc1d99377b47ba6: Status 404 returned error can't find the container with id 1c589abb94ff7c66d9ea476662e00cfd778335397f8383cdabc1d99377b47ba6 Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.665079 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9"] Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.674155 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzg7z\" (UniqueName: \"kubernetes.io/projected/f46fb0dc-2855-48f1-b744-1b5cc24f00e9-kube-api-access-tzg7z\") pod \"machine-api-operator-5694c8668f-czcvh\" (UID: \"f46fb0dc-2855-48f1-b744-1b5cc24f00e9\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.684232 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:27 crc kubenswrapper[4591]: W1203 12:07:27.686191 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39c241dd_677f_4270_b941_a71c3fab94d9.slice/crio-3d54957069118346626404b08404864637bb965a5de95654a6684a3d456d3186 WatchSource:0}: Error finding container 3d54957069118346626404b08404864637bb965a5de95654a6684a3d456d3186: Status 404 returned error can't find the container with id 3d54957069118346626404b08404864637bb965a5de95654a6684a3d456d3186 Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.688325 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.691186 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn2jm\" (UniqueName: \"kubernetes.io/projected/830deeec-973c-4826-9357-b341f6a4b399-kube-api-access-zn2jm\") pod \"dns-operator-744455d44c-64znj\" (UID: \"830deeec-973c-4826-9357-b341f6a4b399\") " pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.707319 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.708130 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/49493d9d-b35e-4a1e-8ecb-730ec9793700-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b98hz\" (UID: \"49493d9d-b35e-4a1e-8ecb-730ec9793700\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.733731 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7350e5d8-3a9e-4c57-9e86-910646ee95c9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cfw6s\" (UID: \"7350e5d8-3a9e-4c57-9e86-910646ee95c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.758312 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.771493 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv"] Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.778177 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 12:07:27 crc kubenswrapper[4591]: W1203 12:07:27.790155 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e7d77fe_8f25_4c1f_8d0d_6a8413a32e9a.slice/crio-b3946a75c0b755beddb19262694ef47e7a3768386fa55e885858cf15ec926d26 WatchSource:0}: Error finding container b3946a75c0b755beddb19262694ef47e7a3768386fa55e885858cf15ec926d26: Status 404 returned error can't find the container with id b3946a75c0b755beddb19262694ef47e7a3768386fa55e885858cf15ec926d26 Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.799581 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.830770 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831362 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831530 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-trusted-ca\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831556 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h45zb\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-kube-api-access-h45zb\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: E1203 12:07:27.831620 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:09:29.831584554 +0000 UTC m=+267.258624325 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831659 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-node-pullsecrets\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831704 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/047dfe24-1216-41aa-93be-7d434cb0fff6-service-ca-bundle\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831734 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831777 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2qlf\" (UniqueName: \"kubernetes.io/projected/4e92b06f-86a5-4db3-a4ea-81b914d29d9d-kube-api-access-v2qlf\") pod \"cluster-samples-operator-665b6dd947-vbtjn\" (UID: \"4e92b06f-86a5-4db3-a4ea-81b914d29d9d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831794 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8156e38b-b819-40eb-8022-403bd494a981-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831809 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tldtx\" (UniqueName: \"kubernetes.io/projected/e15736ce-de7b-465b-8150-e52bfe141765-kube-api-access-tldtx\") pod \"migrator-59844c95c7-zntmm\" (UID: \"e15736ce-de7b-465b-8150-e52bfe141765\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831846 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h4jg\" (UniqueName: \"kubernetes.io/projected/741b2760-bc39-47b7-9a01-59c46af6e440-kube-api-access-5h4jg\") pod \"downloads-7954f5f757-gdw9n\" (UID: \"741b2760-bc39-47b7-9a01-59c46af6e440\") " 
pod="openshift-console/downloads-7954f5f757-gdw9n" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831863 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-config\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831922 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-encryption-config\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831941 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831959 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06a84866-8b8f-46f7-ae01-f3c5929a5814-config\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.831977 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e104273c-2ed5-4e1f-ae08-33412e0cfd00-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832003 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4gvz\" (UniqueName: \"kubernetes.io/projected/98a13519-a12b-4d1a-817e-aa1a67e8f289-kube-api-access-s4gvz\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832018 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-etcd-serving-ca\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832046 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmf57\" (UniqueName: \"kubernetes.io/projected/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-kube-api-access-pmf57\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 
crc kubenswrapper[4591]: I1203 12:07:27.832092 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-certificates\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832112 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832128 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-image-import-ca\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832144 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832161 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832177 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-serving-cert\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832191 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06a84866-8b8f-46f7-ae01-f3c5929a5814-trusted-ca\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832214 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4e92b06f-86a5-4db3-a4ea-81b914d29d9d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vbtjn\" (UID: \"4e92b06f-86a5-4db3-a4ea-81b914d29d9d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:27 crc 
kubenswrapper[4591]: I1203 12:07:27.832275 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832292 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgkvh\" (UniqueName: \"kubernetes.io/projected/047dfe24-1216-41aa-93be-7d434cb0fff6-kube-api-access-mgkvh\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832346 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e104273c-2ed5-4e1f-ae08-33412e0cfd00-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832363 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-stats-auth\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832412 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832432 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmn9w\" (UniqueName: \"kubernetes.io/projected/06a84866-8b8f-46f7-ae01-f3c5929a5814-kube-api-access-vmn9w\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832447 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832532 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj2hs\" (UniqueName: \"kubernetes.io/projected/e104273c-2ed5-4e1f-ae08-33412e0cfd00-kube-api-access-hj2hs\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832559 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/98a13519-a12b-4d1a-817e-aa1a67e8f289-proxy-tls\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832582 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crllz\" (UniqueName: \"kubernetes.io/projected/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-kube-api-access-crllz\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832614 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832663 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-audit\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832700 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832722 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068983ac-e598-4d3c-aafe-43d86b49bd4a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832737 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-etcd-client\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832754 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhltp\" (UniqueName: 
\"kubernetes.io/projected/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-kube-api-access-nhltp\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832768 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-metrics-certs\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832789 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832848 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/068983ac-e598-4d3c-aafe-43d86b49bd4a-config\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832870 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8156e38b-b819-40eb-8022-403bd494a981-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832888 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8156e38b-b819-40eb-8022-403bd494a981-config\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832901 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-bound-sa-token\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832917 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/068983ac-e598-4d3c-aafe-43d86b49bd4a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832932 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/98a13519-a12b-4d1a-817e-aa1a67e8f289-auth-proxy-config\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832948 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/98a13519-a12b-4d1a-817e-aa1a67e8f289-images\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832964 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-tls\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.832980 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-default-certificate\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.833005 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-audit-dir\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: E1203 12:07:27.833018 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.333009076 +0000 UTC m=+145.760048845 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.833035 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06a84866-8b8f-46f7-ae01-f3c5929a5814-serving-cert\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.906382 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8n547"] Dec 03 12:07:27 crc kubenswrapper[4591]: W1203 12:07:27.925199 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod671f4fe9_fef4_40d1_9379_6d90d29320f4.slice/crio-c74e6ab6244c69d7cdf79be4070cc93b312dc505da1384d73fce69f2e4df8609 WatchSource:0}: Error finding container c74e6ab6244c69d7cdf79be4070cc93b312dc505da1384d73fce69f2e4df8609: Status 404 returned error can't find the container with id c74e6ab6244c69d7cdf79be4070cc93b312dc505da1384d73fce69f2e4df8609 Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.934724 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:27 crc kubenswrapper[4591]: E1203 12:07:27.934846 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.434828003 +0000 UTC m=+145.861867773 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.934902 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crllz\" (UniqueName: \"kubernetes.io/projected/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-kube-api-access-crllz\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.934928 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.934991 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935031 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8wgf\" (UniqueName: \"kubernetes.io/projected/a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248-kube-api-access-n8wgf\") pod \"control-plane-machine-set-operator-78cbb6b69f-7ftqp\" (UID: \"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935098 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-audit\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935115 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935131 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068983ac-e598-4d3c-aafe-43d86b49bd4a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935153 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-etcd-client\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935168 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lzdn\" (UniqueName: \"kubernetes.io/projected/136811dd-de89-44c9-a2b2-01bdabecec20-kube-api-access-6lzdn\") pod \"ingress-canary-868m7\" (UID: \"136811dd-de89-44c9-a2b2-01bdabecec20\") " pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935194 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhltp\" (UniqueName: \"kubernetes.io/projected/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-kube-api-access-nhltp\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935219 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-metrics-certs\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935246 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935263 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25566b9c-ba66-4db9-be25-b7cf8f913de6-secret-volume\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935302 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e72f6cd2-29a9-4921-bee2-229f9fd19774-webhook-cert\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935366 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/068983ac-e598-4d3c-aafe-43d86b49bd4a-config\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935381 4591 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8156e38b-b819-40eb-8022-403bd494a981-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935427 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-plugins-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935453 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49jwd\" (UniqueName: \"kubernetes.io/projected/9f8ca899-dbb0-4e48-b783-3e4e8337505f-kube-api-access-49jwd\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935500 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8156e38b-b819-40eb-8022-403bd494a981-config\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935520 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-bound-sa-token\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935560 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/068983ac-e598-4d3c-aafe-43d86b49bd4a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935577 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/98a13519-a12b-4d1a-817e-aa1a67e8f289-auth-proxy-config\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935610 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/98a13519-a12b-4d1a-817e-aa1a67e8f289-images\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935647 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-tls\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935663 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/352d91de-d668-423d-af61-44fced909890-serving-cert\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935680 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-default-certificate\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935702 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/136811dd-de89-44c9-a2b2-01bdabecec20-cert\") pod \"ingress-canary-868m7\" (UID: \"136811dd-de89-44c9-a2b2-01bdabecec20\") " pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935722 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935760 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-audit-dir\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935777 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06a84866-8b8f-46f7-ae01-f3c5929a5814-serving-cert\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935792 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-mountpoint-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935823 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-trusted-ca\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935838 
4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h45zb\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-kube-api-access-h45zb\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935854 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-node-pullsecrets\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935868 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/047dfe24-1216-41aa-93be-7d434cb0fff6-service-ca-bundle\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935887 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q475\" (UniqueName: \"kubernetes.io/projected/d242eec9-e870-47a6-9eae-548e4634af2a-kube-api-access-7q475\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935911 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f26d8d0d-6d96-432b-9921-ffa4af508729-signing-key\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935933 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25566b9c-ba66-4db9-be25-b7cf8f913de6-config-volume\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935960 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.935984 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2qlf\" (UniqueName: \"kubernetes.io/projected/4e92b06f-86a5-4db3-a4ea-81b914d29d9d-kube-api-access-v2qlf\") pod \"cluster-samples-operator-665b6dd947-vbtjn\" (UID: \"4e92b06f-86a5-4db3-a4ea-81b914d29d9d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.936039 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/8156e38b-b819-40eb-8022-403bd494a981-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.936436 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: E1203 12:07:27.937160 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.437149796 +0000 UTC m=+145.864189567 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.938776 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-trusted-ca\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.939140 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.939172 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/98a13519-a12b-4d1a-817e-aa1a67e8f289-images\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.939950 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbxqz\" (UniqueName: \"kubernetes.io/projected/e43b9c0c-5961-4226-adf7-59660279b3a0-kube-api-access-kbxqz\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.939981 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tnbn\" (UniqueName: \"kubernetes.io/projected/25566b9c-ba66-4db9-be25-b7cf8f913de6-kube-api-access-5tnbn\") pod \"collect-profiles-29412720-cxwmp\" (UID: 
\"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940009 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tldtx\" (UniqueName: \"kubernetes.io/projected/e15736ce-de7b-465b-8150-e52bfe141765-kube-api-access-tldtx\") pod \"migrator-59844c95c7-zntmm\" (UID: \"e15736ce-de7b-465b-8150-e52bfe141765\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940030 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-socket-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940050 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940086 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h4jg\" (UniqueName: \"kubernetes.io/projected/741b2760-bc39-47b7-9a01-59c46af6e440-kube-api-access-5h4jg\") pod \"downloads-7954f5f757-gdw9n\" (UID: \"741b2760-bc39-47b7-9a01-59c46af6e440\") " pod="openshift-console/downloads-7954f5f757-gdw9n" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940104 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f26d8d0d-6d96-432b-9921-ffa4af508729-signing-cabundle\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940141 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-config\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940252 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vk8x\" (UniqueName: \"kubernetes.io/projected/e72f6cd2-29a9-4921-bee2-229f9fd19774-kube-api-access-9vk8x\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940279 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-encryption-config\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940298 4591 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940452 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e72f6cd2-29a9-4921-bee2-229f9fd19774-apiservice-cert\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940516 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06a84866-8b8f-46f7-ae01-f3c5929a5814-config\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940536 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e104273c-2ed5-4e1f-ae08-33412e0cfd00-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940553 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4gvz\" (UniqueName: \"kubernetes.io/projected/98a13519-a12b-4d1a-817e-aa1a67e8f289-kube-api-access-s4gvz\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940568 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-etcd-serving-ca\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940584 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmf57\" (UniqueName: \"kubernetes.io/projected/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-kube-api-access-pmf57\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940602 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6698m\" (UniqueName: \"kubernetes.io/projected/352d91de-d668-423d-af61-44fced909890-kube-api-access-6698m\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940821 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e43b9c0c-5961-4226-adf7-59660279b3a0-metrics-tls\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940840 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-certificates\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940859 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940874 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e72f6cd2-29a9-4921-bee2-229f9fd19774-tmpfs\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940908 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-image-import-ca\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940926 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940953 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940967 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-serving-cert\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940980 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06a84866-8b8f-46f7-ae01-f3c5929a5814-trusted-ca\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " 
pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.940996 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4e92b06f-86a5-4db3-a4ea-81b914d29d9d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vbtjn\" (UID: \"4e92b06f-86a5-4db3-a4ea-81b914d29d9d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941012 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d242eec9-e870-47a6-9eae-548e4634af2a-node-bootstrap-token\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941038 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941083 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-metrics-certs\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941086 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-csi-data-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941143 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgkvh\" (UniqueName: \"kubernetes.io/projected/047dfe24-1216-41aa-93be-7d434cb0fff6-kube-api-access-mgkvh\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941169 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941195 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:27 crc 
kubenswrapper[4591]: I1203 12:07:27.941221 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e104273c-2ed5-4e1f-ae08-33412e0cfd00-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941243 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-stats-auth\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941261 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e43b9c0c-5961-4226-adf7-59660279b3a0-config-volume\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941300 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941329 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941356 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-registration-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941394 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmn9w\" (UniqueName: \"kubernetes.io/projected/06a84866-8b8f-46f7-ae01-f3c5929a5814-kube-api-access-vmn9w\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941412 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzvlr\" (UniqueName: \"kubernetes.io/projected/f26d8d0d-6d96-432b-9921-ffa4af508729-kube-api-access-mzvlr\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941444 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941518 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vtbt\" (UniqueName: \"kubernetes.io/projected/bff50f24-9c88-4db0-bf44-e9a5a4b36431-kube-api-access-2vtbt\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941734 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj2hs\" (UniqueName: \"kubernetes.io/projected/e104273c-2ed5-4e1f-ae08-33412e0cfd00-kube-api-access-hj2hs\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941753 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/98a13519-a12b-4d1a-817e-aa1a67e8f289-proxy-tls\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941776 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/352d91de-d668-423d-af61-44fced909890-config\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941797 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7ftqp\" (UID: \"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.941827 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d242eec9-e870-47a6-9eae-548e4634af2a-certs\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.942254 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/068983ac-e598-4d3c-aafe-43d86b49bd4a-config\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.942641 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8156e38b-b819-40eb-8022-403bd494a981-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.942878 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-audit\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.942990 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.942998 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-node-pullsecrets\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.943102 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8156e38b-b819-40eb-8022-403bd494a981-config\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.943112 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-audit-dir\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.943589 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.943616 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/047dfe24-1216-41aa-93be-7d434cb0fff6-service-ca-bundle\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.943814 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/98a13519-a12b-4d1a-817e-aa1a67e8f289-auth-proxy-config\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 
12:07:27.944291 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-tls\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.944536 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-config\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.944571 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-etcd-client\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.945778 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06a84866-8b8f-46f7-ae01-f3c5929a5814-config\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.946223 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e104273c-2ed5-4e1f-ae08-33412e0cfd00-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.947364 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e104273c-2ed5-4e1f-ae08-33412e0cfd00-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.947816 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-etcd-serving-ca\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.950091 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.954840 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.954864 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-certificates\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.955306 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-64znj" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.955624 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06a84866-8b8f-46f7-ae01-f3c5929a5814-trusted-ca\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.955789 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.956312 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-image-import-ca\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.956583 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm"] Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.957415 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.959428 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960133 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-serving-cert\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960230 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4e92b06f-86a5-4db3-a4ea-81b914d29d9d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vbtjn\" (UID: \"4e92b06f-86a5-4db3-a4ea-81b914d29d9d\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960232 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960509 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960645 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-default-certificate\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960761 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/98a13519-a12b-4d1a-817e-aa1a67e8f289-proxy-tls\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960820 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/068983ac-e598-4d3c-aafe-43d86b49bd4a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.960922 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06a84866-8b8f-46f7-ae01-f3c5929a5814-serving-cert\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.961102 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.961456 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.962289 4591 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-encryption-config\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.962343 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.962518 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/047dfe24-1216-41aa-93be-7d434cb0fff6-stats-auth\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.974146 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.979235 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crllz\" (UniqueName: \"kubernetes.io/projected/fbbd067f-5929-4ab1-aecb-ab19da21aa5f-kube-api-access-crllz\") pod \"kube-storage-version-migrator-operator-b67b599dd-2wq84\" (UID: \"fbbd067f-5929-4ab1-aecb-ab19da21aa5f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:27 crc kubenswrapper[4591]: I1203 12:07:27.996052 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhltp\" (UniqueName: \"kubernetes.io/projected/3efc6c62-2216-4e59-ac20-9aa55fbd8a38-kube-api-access-nhltp\") pod \"apiserver-76f77b778f-kdctl\" (UID: \"3efc6c62-2216-4e59-ac20-9aa55fbd8a38\") " pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.001043 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.001449 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.010901 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.013265 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.020636 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2qlf\" (UniqueName: \"kubernetes.io/projected/4e92b06f-86a5-4db3-a4ea-81b914d29d9d-kube-api-access-v2qlf\") pod \"cluster-samples-operator-665b6dd947-vbtjn\" (UID: \"4e92b06f-86a5-4db3-a4ea-81b914d29d9d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.043013 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mc4zp"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.044969 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.045901 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068983ac-e598-4d3c-aafe-43d86b49bd4a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-g9gz4\" (UID: \"068983ac-e598-4d3c-aafe-43d86b49bd4a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.046328 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.546306171 +0000 UTC m=+145.973345941 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047094 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-socket-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047128 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f26d8d0d-6d96-432b-9921-ffa4af508729-signing-cabundle\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047154 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vk8x\" (UniqueName: \"kubernetes.io/projected/e72f6cd2-29a9-4921-bee2-229f9fd19774-kube-api-access-9vk8x\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047176 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e72f6cd2-29a9-4921-bee2-229f9fd19774-apiservice-cert\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047213 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6698m\" (UniqueName: \"kubernetes.io/projected/352d91de-d668-423d-af61-44fced909890-kube-api-access-6698m\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047469 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e72f6cd2-29a9-4921-bee2-229f9fd19774-tmpfs\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047503 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e43b9c0c-5961-4226-adf7-59660279b3a0-metrics-tls\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047716 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d242eec9-e870-47a6-9eae-548e4634af2a-node-bootstrap-token\") pod 
\"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047741 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-csi-data-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.047864 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-socket-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.048032 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.048241 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e43b9c0c-5961-4226-adf7-59660279b3a0-config-volume\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.048346 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.048369 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-registration-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.048395 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e72f6cd2-29a9-4921-bee2-229f9fd19774-tmpfs\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049183 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-csi-data-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049489 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzvlr\" (UniqueName: 
\"kubernetes.io/projected/f26d8d0d-6d96-432b-9921-ffa4af508729-kube-api-access-mzvlr\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049524 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vtbt\" (UniqueName: \"kubernetes.io/projected/bff50f24-9c88-4db0-bf44-e9a5a4b36431-kube-api-access-2vtbt\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049555 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/352d91de-d668-423d-af61-44fced909890-config\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049556 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f26d8d0d-6d96-432b-9921-ffa4af508729-signing-cabundle\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049575 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7ftqp\" (UID: \"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049594 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d242eec9-e870-47a6-9eae-548e4634af2a-certs\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049622 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8wgf\" (UniqueName: \"kubernetes.io/projected/a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248-kube-api-access-n8wgf\") pod \"control-plane-machine-set-operator-78cbb6b69f-7ftqp\" (UID: \"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049648 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lzdn\" (UniqueName: \"kubernetes.io/projected/136811dd-de89-44c9-a2b2-01bdabecec20-kube-api-access-6lzdn\") pod \"ingress-canary-868m7\" (UID: \"136811dd-de89-44c9-a2b2-01bdabecec20\") " pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049680 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: 
\"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049700 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25566b9c-ba66-4db9-be25-b7cf8f913de6-secret-volume\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049714 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e72f6cd2-29a9-4921-bee2-229f9fd19774-webhook-cert\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049748 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-plugins-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049767 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49jwd\" (UniqueName: \"kubernetes.io/projected/9f8ca899-dbb0-4e48-b783-3e4e8337505f-kube-api-access-49jwd\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049800 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/352d91de-d668-423d-af61-44fced909890-serving-cert\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049815 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e43b9c0c-5961-4226-adf7-59660279b3a0-config-volume\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.050467 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/352d91de-d668-423d-af61-44fced909890-config\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.051681 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pjthj"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.054326 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.056502 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-registration-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.057487 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-plugins-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060301 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.049821 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/136811dd-de89-44c9-a2b2-01bdabecec20-cert\") pod \"ingress-canary-868m7\" (UID: \"136811dd-de89-44c9-a2b2-01bdabecec20\") " pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060386 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-mountpoint-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060427 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q475\" (UniqueName: \"kubernetes.io/projected/d242eec9-e870-47a6-9eae-548e4634af2a-kube-api-access-7q475\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060439 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060459 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-bound-sa-token\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060447 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f26d8d0d-6d96-432b-9921-ffa4af508729-signing-key\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060533 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bff50f24-9c88-4db0-bf44-e9a5a4b36431-mountpoint-dir\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " 
pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060544 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060552 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25566b9c-ba66-4db9-be25-b7cf8f913de6-config-volume\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060837 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbxqz\" (UniqueName: \"kubernetes.io/projected/e43b9c0c-5961-4226-adf7-59660279b3a0-kube-api-access-kbxqz\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.060860 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tnbn\" (UniqueName: \"kubernetes.io/projected/25566b9c-ba66-4db9-be25-b7cf8f913de6-kube-api-access-5tnbn\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.061080 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.561035801 +0000 UTC m=+145.988075571 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.061236 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25566b9c-ba66-4db9-be25-b7cf8f913de6-config-volume\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.062441 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e72f6cd2-29a9-4921-bee2-229f9fd19774-apiservice-cert\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.062639 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25566b9c-ba66-4db9-be25-b7cf8f913de6-secret-volume\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.065143 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e72f6cd2-29a9-4921-bee2-229f9fd19774-webhook-cert\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.065907 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/352d91de-d668-423d-af61-44fced909890-serving-cert\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.066850 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d242eec9-e870-47a6-9eae-548e4634af2a-node-bootstrap-token\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.067012 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/136811dd-de89-44c9-a2b2-01bdabecec20-cert\") pod \"ingress-canary-868m7\" (UID: \"136811dd-de89-44c9-a2b2-01bdabecec20\") " pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.067590 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e43b9c0c-5961-4226-adf7-59660279b3a0-metrics-tls\") pod \"dns-default-fp692\" (UID: 
\"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.067712 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7ftqp\" (UID: \"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.069759 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d242eec9-e870-47a6-9eae-548e4634af2a-certs\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.075198 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f26d8d0d-6d96-432b-9921-ffa4af508729-signing-key\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.093459 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h45zb\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-kube-api-access-h45zb\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.109985 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xc9nk"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.111637 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgkvh\" (UniqueName: \"kubernetes.io/projected/047dfe24-1216-41aa-93be-7d434cb0fff6-kube-api-access-mgkvh\") pod \"router-default-5444994796-dchgs\" (UID: \"047dfe24-1216-41aa-93be-7d434cb0fff6\") " pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:28 crc kubenswrapper[4591]: W1203 12:07:28.115085 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2cf8937_46f9_42a3_965e_1c971c1a544b.slice/crio-61a0a87afb82e799a18bb77edae79ad1be5f7b0a119acc167884250cf508121a WatchSource:0}: Error finding container 61a0a87afb82e799a18bb77edae79ad1be5f7b0a119acc167884250cf508121a: Status 404 returned error can't find the container with id 61a0a87afb82e799a18bb77edae79ad1be5f7b0a119acc167884250cf508121a Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.117371 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cnc8r"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.124043 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tldtx\" (UniqueName: \"kubernetes.io/projected/e15736ce-de7b-465b-8150-e52bfe141765-kube-api-access-tldtx\") pod \"migrator-59844c95c7-zntmm\" (UID: \"e15736ce-de7b-465b-8150-e52bfe141765\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.130662 4591 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8156e38b-b819-40eb-8022-403bd494a981-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-24xcm\" (UID: \"8156e38b-b819-40eb-8022-403bd494a981\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.138671 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.153940 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.162357 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.162568 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.662530369 +0000 UTC m=+146.089570139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.162630 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.163019 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.663007544 +0000 UTC m=+146.090047314 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.178084 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h4jg\" (UniqueName: \"kubernetes.io/projected/741b2760-bc39-47b7-9a01-59c46af6e440-kube-api-access-5h4jg\") pod \"downloads-7954f5f757-gdw9n\" (UID: \"741b2760-bc39-47b7-9a01-59c46af6e440\") " pod="openshift-console/downloads-7954f5f757-gdw9n" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.186042 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.194277 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8d6cq"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.194519 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4gvz\" (UniqueName: \"kubernetes.io/projected/98a13519-a12b-4d1a-817e-aa1a67e8f289-kube-api-access-s4gvz\") pod \"machine-config-operator-74547568cd-kwvmh\" (UID: \"98a13519-a12b-4d1a-817e-aa1a67e8f289\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.202170 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-czcvh"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.207966 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.210528 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.212496 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmn9w\" (UniqueName: \"kubernetes.io/projected/06a84866-8b8f-46f7-ae01-f3c5929a5814-kube-api-access-vmn9w\") pod \"console-operator-58897d9998-xg6n2\" (UID: \"06a84866-8b8f-46f7-ae01-f3c5929a5814\") " pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.228672 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmf57\" (UniqueName: \"kubernetes.io/projected/448fff50-3aec-4b69-8a4c-8d5862cb5fe1-kube-api-access-pmf57\") pod \"openshift-apiserver-operator-796bbdcf4f-6x4k7\" (UID: \"448fff50-3aec-4b69-8a4c-8d5862cb5fe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.252385 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qlgfr\" (UID: \"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.265405 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.265522 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.265703 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.765683909 +0000 UTC m=+146.192723679 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.269927 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.272148 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj2hs\" (UniqueName: \"kubernetes.io/projected/e104273c-2ed5-4e1f-ae08-33412e0cfd00-kube-api-access-hj2hs\") pod \"openshift-controller-manager-operator-756b6f6bc6-d26b4\" (UID: \"e104273c-2ed5-4e1f-ae08-33412e0cfd00\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:28 crc kubenswrapper[4591]: W1203 12:07:28.286905 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf46fb0dc_2855_48f1_b744_1b5cc24f00e9.slice/crio-60a9303952a182c1343b7805993d44dfad81da905919c6288de70e9548df504f WatchSource:0}: Error finding container 60a9303952a182c1343b7805993d44dfad81da905919c6288de70e9548df504f: Status 404 returned error can't find the container with id 60a9303952a182c1343b7805993d44dfad81da905919c6288de70e9548df504f Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.297978 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vk8x\" (UniqueName: \"kubernetes.io/projected/e72f6cd2-29a9-4921-bee2-229f9fd19774-kube-api-access-9vk8x\") pod \"packageserver-d55dfcdfc-wfl6d\" (UID: \"e72f6cd2-29a9-4921-bee2-229f9fd19774\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.312567 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.313661 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6698m\" (UniqueName: \"kubernetes.io/projected/352d91de-d668-423d-af61-44fced909890-kube-api-access-6698m\") pod \"service-ca-operator-777779d784-fv4vx\" (UID: \"352d91de-d668-423d-af61-44fced909890\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.323802 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.335007 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vtbt\" (UniqueName: \"kubernetes.io/projected/bff50f24-9c88-4db0-bf44-e9a5a4b36431-kube-api-access-2vtbt\") pod \"csi-hostpathplugin-q2xzp\" (UID: \"bff50f24-9c88-4db0-bf44-e9a5a4b36431\") " pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.335326 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.356980 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzvlr\" (UniqueName: \"kubernetes.io/projected/f26d8d0d-6d96-432b-9921-ffa4af508729-kube-api-access-mzvlr\") pod \"service-ca-9c57cc56f-dr86v\" (UID: \"f26d8d0d-6d96-432b-9921-ffa4af508729\") " pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.370992 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.371414 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.871401216 +0000 UTC m=+146.298440986 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.373836 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49jwd\" (UniqueName: \"kubernetes.io/projected/9f8ca899-dbb0-4e48-b783-3e4e8337505f-kube-api-access-49jwd\") pod \"marketplace-operator-79b997595-849sq\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:28 crc kubenswrapper[4591]: W1203 12:07:28.373929 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-54c744b10069da8a621bd66c3e21d829081e5b91f3adf019fcff2b3fff7b6068 WatchSource:0}: Error finding container 54c744b10069da8a621bd66c3e21d829081e5b91f3adf019fcff2b3fff7b6068: Status 404 returned error can't find the container with id 54c744b10069da8a621bd66c3e21d829081e5b91f3adf019fcff2b3fff7b6068 Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.373992 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.378565 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.405864 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tnbn\" (UniqueName: \"kubernetes.io/projected/25566b9c-ba66-4db9-be25-b7cf8f913de6-kube-api-access-5tnbn\") pod \"collect-profiles-29412720-cxwmp\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.410306 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gdw9n" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.415605 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.417582 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbxqz\" (UniqueName: \"kubernetes.io/projected/e43b9c0c-5961-4226-adf7-59660279b3a0-kube-api-access-kbxqz\") pod \"dns-default-fp692\" (UID: \"e43b9c0c-5961-4226-adf7-59660279b3a0\") " pod="openshift-dns/dns-default-fp692" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.422276 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.433759 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lzdn\" (UniqueName: \"kubernetes.io/projected/136811dd-de89-44c9-a2b2-01bdabecec20-kube-api-access-6lzdn\") pod \"ingress-canary-868m7\" (UID: \"136811dd-de89-44c9-a2b2-01bdabecec20\") " pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.449218 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.471311 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" event={"ID":"78feea03-d859-44c3-8832-a765ac762e2c","Type":"ContainerStarted","Data":"a93254e4131140baccbad908c18df0e352813dfc501e048d69ef0a92ef51a37f"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.471619 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8wgf\" (UniqueName: \"kubernetes.io/projected/a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248-kube-api-access-n8wgf\") pod \"control-plane-machine-set-operator-78cbb6b69f-7ftqp\" (UID: \"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.471725 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.472456 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.472618 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.972579953 +0000 UTC m=+146.399619722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.473597 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.475347 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" event={"ID":"ab6e3a66-178d-4d51-b9f5-0ed128342a20","Type":"ContainerStarted","Data":"4ded9b0747c45afd235cfa3590db7a74d70c09a031684fb4ef52e532e2e78650"} Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.476189 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:28.976164203 +0000 UTC m=+146.403203973 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.484821 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q475\" (UniqueName: \"kubernetes.io/projected/d242eec9-e870-47a6-9eae-548e4634af2a-kube-api-access-7q475\") pod \"machine-config-server-bvpwm\" (UID: \"d242eec9-e870-47a6-9eae-548e4634af2a\") " pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.495724 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-64znj"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.518671 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" event={"ID":"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2","Type":"ContainerStarted","Data":"ac915bde33b8549f33c26b7bbc94a04d56a53606734bf17a2da9ae3dfd474092"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.518725 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" event={"ID":"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2","Type":"ContainerStarted","Data":"f24a883481b8935d17860e287a313c29e7b728eaac7d08c1a12c1f41eec0c86d"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.545255 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" event={"ID":"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a","Type":"ContainerStarted","Data":"84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.545298 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" event={"ID":"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a","Type":"ContainerStarted","Data":"b3946a75c0b755beddb19262694ef47e7a3768386fa55e885858cf15ec926d26"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.547962 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.575003 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.575408 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.07538578 +0000 UTC m=+146.502425550 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: W1203 12:07:28.575907 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod830deeec_973c_4826_9357_b341f6a4b399.slice/crio-225897a1be300c22288ae9bcbc74e516b7138e083aa5e42f2fba5fbd80a0ecc9 WatchSource:0}: Error finding container 225897a1be300c22288ae9bcbc74e516b7138e083aa5e42f2fba5fbd80a0ecc9: Status 404 returned error can't find the container with id 225897a1be300c22288ae9bcbc74e516b7138e083aa5e42f2fba5fbd80a0ecc9 Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.575946 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.576292 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.076278854 +0000 UTC m=+146.503318624 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.581401 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" event={"ID":"b2cf8937-46f9-42a3-965e-1c971c1a544b","Type":"ContainerStarted","Data":"d28dc3552dd5052bf96fe3c06d123d4bd0b62b9d5c620bddaff2c4dcf135a257"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.581440 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" event={"ID":"b2cf8937-46f9-42a3-965e-1c971c1a544b","Type":"ContainerStarted","Data":"61a0a87afb82e799a18bb77edae79ad1be5f7b0a119acc167884250cf508121a"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.582303 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.584319 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.587403 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" event={"ID":"a76a4f2a-891a-4931-b1a6-49208d5b2c01","Type":"ContainerStarted","Data":"0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.587433 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" event={"ID":"a76a4f2a-891a-4931-b1a6-49208d5b2c01","Type":"ContainerStarted","Data":"1c589abb94ff7c66d9ea476662e00cfd778335397f8383cdabc1d99377b47ba6"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.587998 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.594671 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.600858 4591 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-m7nk6 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.600884 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" podUID="b2cf8937-46f9-42a3-965e-1c971c1a544b" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.617392 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" event={"ID":"7350e5d8-3a9e-4c57-9e86-910646ee95c9","Type":"ContainerStarted","Data":"9197b6bf14cced666fcc44ed25a1e06269eef1c761105516a2be1844f7ff3597"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.617412 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" event={"ID":"7350e5d8-3a9e-4c57-9e86-910646ee95c9","Type":"ContainerStarted","Data":"5e51d4e8af32fe80cc10e7747b06ebe028043675e849063c709efe968293d801"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.620912 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.649037 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.650770 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" event={"ID":"7c70c478-feff-4a08-9522-96297d92ba2b","Type":"ContainerStarted","Data":"e6e4a7329c051cb0ae966834fadbe7f57be83524d61bc539a3ea88c249b08a3d"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.650801 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" event={"ID":"7c70c478-feff-4a08-9522-96297d92ba2b","Type":"ContainerStarted","Data":"0db0fc2390cb15a63e22ac8a09534c8364fa2043cbc224fc82d5ef11d00114b2"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.651760 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.651991 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-bvpwm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.660335 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-868m7" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.677370 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.678566 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.178547424 +0000 UTC m=+146.605587195 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.709742 4591 generic.go:334] "Generic (PLEG): container finished" podID="39c241dd-677f-4270-b941-a71c3fab94d9" containerID="6899b8174c80db6abb5d21563fbf738691204c6ea691de7ce4d457a730e4f460" exitCode=0 Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.709821 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" event={"ID":"39c241dd-677f-4270-b941-a71c3fab94d9","Type":"ContainerDied","Data":"6899b8174c80db6abb5d21563fbf738691204c6ea691de7ce4d457a730e4f460"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.709848 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" event={"ID":"39c241dd-677f-4270-b941-a71c3fab94d9","Type":"ContainerStarted","Data":"3d54957069118346626404b08404864637bb965a5de95654a6684a3d456d3186"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.715622 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-fp692" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.720174 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.726405 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.795056 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-dchgs" event={"ID":"047dfe24-1216-41aa-93be-7d434cb0fff6","Type":"ContainerStarted","Data":"9e6751eba6b7cce5ebaa84e955afdafae5db27493a2ea6271bff4b459be98849"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.795746 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.798572 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.298559234 +0000 UTC m=+146.725599005 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.808629 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.808749 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" event={"ID":"540db196-6650-4e0d-ad6f-3b97bbc003c7","Type":"ContainerStarted","Data":"f2897b8eb3264bd544a26b56cd40d9d328ee911a723085b7b47a0840c949f0b2"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.808779 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" event={"ID":"540db196-6650-4e0d-ad6f-3b97bbc003c7","Type":"ContainerStarted","Data":"3bcfa05f421860c4527f914b5d8e6b48a22d597947a32c2d99f5c80a3201ca8f"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.818645 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" event={"ID":"f46fb0dc-2855-48f1-b744-1b5cc24f00e9","Type":"ContainerStarted","Data":"60a9303952a182c1343b7805993d44dfad81da905919c6288de70e9548df504f"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.844718 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cnc8r" 
event={"ID":"3d1889fc-cf0c-4114-8653-a7b95c23bdd5","Type":"ContainerStarted","Data":"4912d6adc0c2b324cb495fa2ff955fe259f5e4d1600eedd226fe278241c503d4"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.859190 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.884044 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kdctl"] Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.897404 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:28 crc kubenswrapper[4591]: E1203 12:07:28.898436 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.398416593 +0000 UTC m=+146.825456363 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.924821 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" event={"ID":"671f4fe9-fef4-40d1-9379-6d90d29320f4","Type":"ContainerStarted","Data":"3e0bfe176d1d9ef505fc410d748df6523bc3a38c13fc96e0b50ff69bfd8c2e9f"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.924860 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" event={"ID":"671f4fe9-fef4-40d1-9379-6d90d29320f4","Type":"ContainerStarted","Data":"c74e6ab6244c69d7cdf79be4070cc93b312dc505da1384d73fce69f2e4df8609"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.927634 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"54c744b10069da8a621bd66c3e21d829081e5b91f3adf019fcff2b3fff7b6068"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.930596 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" event={"ID":"49493d9d-b35e-4a1e-8ecb-730ec9793700","Type":"ContainerStarted","Data":"051f096ab41e3b86c77a87b01842b95e92bc61d81696d579bee76d881cff672d"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.932051 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" event={"ID":"ded20790-338d-408a-8087-daf3a7906285","Type":"ContainerStarted","Data":"b91a0d24591788aa69d4689059a74ae47915ab15ded0ebc65f8d27fb9253d9c3"} Dec 03 12:07:28 crc 
kubenswrapper[4591]: I1203 12:07:28.932094 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" event={"ID":"ded20790-338d-408a-8087-daf3a7906285","Type":"ContainerStarted","Data":"64b699b1d3e0846e767fe28bdd70e5546ce32589b8d70ab693f2fc2e4a5deac4"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.932105 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" event={"ID":"ded20790-338d-408a-8087-daf3a7906285","Type":"ContainerStarted","Data":"fc6a93aa538abd6d46e171f185626667d1e542820a1fd6daa083f96451efba96"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.943989 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" event={"ID":"b655df3b-049f-4f7e-9f17-9d84bb008c0a","Type":"ContainerStarted","Data":"5786ed7cecc7d404efa185b0f25c3790129f123a9d41830a070b2b512dba323f"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.959192 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" event={"ID":"35f7aa4d-e222-4ee7-a1a3-93a226933fd6","Type":"ContainerStarted","Data":"19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.959227 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" event={"ID":"35f7aa4d-e222-4ee7-a1a3-93a226933fd6","Type":"ContainerStarted","Data":"a9b64483411b3d5c8f32113df008b05e98833cfe5f0bbbd85400110a0c97c679"} Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.959598 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:28 crc kubenswrapper[4591]: I1203 12:07:28.964808 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.002116 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.002496 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.002731 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.502714729 +0000 UTC m=+146.929754499 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.073917 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" podStartSLOduration=125.073898733 podStartE2EDuration="2m5.073898733s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:29.07374797 +0000 UTC m=+146.500787741" watchObservedRunningTime="2025-12-03 12:07:29.073898733 +0000 UTC m=+146.500938503" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.105946 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-trxnq" podStartSLOduration=125.10593269 podStartE2EDuration="2m5.10593269s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:29.103852409 +0000 UTC m=+146.530892179" watchObservedRunningTime="2025-12-03 12:07:29.10593269 +0000 UTC m=+146.532972459" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.108737 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.109737 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.609722696 +0000 UTC m=+147.036762466 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.201838 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx"] Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.210321 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.211509 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.710648168 +0000 UTC m=+147.137687938 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.273983 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cfw6s" podStartSLOduration=125.273967796 podStartE2EDuration="2m5.273967796s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:29.272307021 +0000 UTC m=+146.699346792" watchObservedRunningTime="2025-12-03 12:07:29.273967796 +0000 UTC m=+146.701007566" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.279817 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d"] Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.311736 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.312436 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.812418954 +0000 UTC m=+147.239458724 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.415234 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.415708 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:29.915692969 +0000 UTC m=+147.342732739 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.516257 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.516897 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.016880533 +0000 UTC m=+147.443920303 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.528159 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" podStartSLOduration=125.528134044 podStartE2EDuration="2m5.528134044s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:29.517179574 +0000 UTC m=+146.944219344" watchObservedRunningTime="2025-12-03 12:07:29.528134044 +0000 UTC m=+146.955173814" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.628262 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.628715 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.128701303 +0000 UTC m=+147.555741074 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.692977 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" podStartSLOduration=125.692960854 podStartE2EDuration="2m5.692960854s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:29.664702518 +0000 UTC m=+147.091742289" watchObservedRunningTime="2025-12-03 12:07:29.692960854 +0000 UTC m=+147.120000625" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.693871 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-849sq"] Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.705750 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-mc4zp" podStartSLOduration=125.705728996 podStartE2EDuration="2m5.705728996s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:29.703079407 +0000 UTC m=+147.130119178" watchObservedRunningTime="2025-12-03 12:07:29.705728996 +0000 UTC m=+147.132768766" Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.729839 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.730172 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.230158062 +0000 UTC m=+147.657197822 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.835213 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.835836 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.335824413 +0000 UTC m=+147.762864183 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.843547 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q2xzp"] Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.857214 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp"] Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.889372 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm"] Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.930976 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7"] Dec 03 12:07:29 crc kubenswrapper[4591]: I1203 12:07:29.939051 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:29 crc kubenswrapper[4591]: E1203 12:07:29.939523 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.439506343 +0000 UTC m=+147.866546103 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.052119 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.052489 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.552459338 +0000 UTC m=+147.979499108 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.101938 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dr86v"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.137263 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" podStartSLOduration=125.137235237 podStartE2EDuration="2m5.137235237s" podCreationTimestamp="2025-12-03 12:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.119986906 +0000 UTC m=+147.547026675" watchObservedRunningTime="2025-12-03 12:07:30.137235237 +0000 UTC m=+147.564275008" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.139676 4591 generic.go:334] "Generic (PLEG): container finished" podID="7c70c478-feff-4a08-9522-96297d92ba2b" containerID="e6e4a7329c051cb0ae966834fadbe7f57be83524d61bc539a3ea88c249b08a3d" exitCode=0 Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.140112 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" event={"ID":"7c70c478-feff-4a08-9522-96297d92ba2b","Type":"ContainerDied","Data":"e6e4a7329c051cb0ae966834fadbe7f57be83524d61bc539a3ea88c249b08a3d"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.140171 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" event={"ID":"7c70c478-feff-4a08-9522-96297d92ba2b","Type":"ContainerStarted","Data":"fb061444de55cc45c27aa3cf5df33d956e5a4b27721741c23695606622e4c48d"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.140289 4591 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.140314 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.151490 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" event={"ID":"e72f6cd2-29a9-4921-bee2-229f9fd19774","Type":"ContainerStarted","Data":"fdeda38699cf1e56f9e0fdea36e33af058ac6eab7a2ddfffbff84d83e51256b3"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.153275 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.153802 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.653783797 +0000 UTC m=+148.080823567 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.162376 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cnc8r" event={"ID":"3d1889fc-cf0c-4114-8653-a7b95c23bdd5","Type":"ContainerStarted","Data":"7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.175170 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.189207 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" event={"ID":"3efc6c62-2216-4e59-ac20-9aa55fbd8a38","Type":"ContainerStarted","Data":"c0432dc19898764f92f57b985a64e89ee497ccc85e0d9370eae4a7cd4264c662"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.208156 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"077a381a83d392547604050dd514c54b2b59d92d1b88616fa8693ad929a439ee"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.208213 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4dfe27d91a7629b12745c4d7a3861d4694231d0931763856acb2a590fce117e6"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.208873 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.218323 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gdw9n"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.235194 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-xg6n2"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.235241 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.240250 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" podStartSLOduration=126.240237113 podStartE2EDuration="2m6.240237113s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.236638885 +0000 UTC m=+147.663678656" watchObservedRunningTime="2025-12-03 12:07:30.240237113 +0000 UTC m=+147.667276883" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.241223 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6dd49e5c348716dc40ecfcd1dc82f68e4a8c1ec1882c19054e73d914800cebae"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.256097 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.257517 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" event={"ID":"fbbd067f-5929-4ab1-aecb-ab19da21aa5f","Type":"ContainerStarted","Data":"4da78eac4c4be43e85db47b009f86a9c701cb687ab0d01e0534884d2ff1b95b6"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.257561 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" event={"ID":"fbbd067f-5929-4ab1-aecb-ab19da21aa5f","Type":"ContainerStarted","Data":"bc5b5e95400776ba86370543fc2e14b7089f2612df8ad487372a5903909ca2f4"} Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.257961 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.757949675 +0000 UTC m=+148.184989435 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.269908 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" event={"ID":"9f8ca899-dbb0-4e48-b783-3e4e8337505f","Type":"ContainerStarted","Data":"e5611a145aae43ce6f83c3b930580d2daa18b3a0162a4d1cd5ee17744da462ef"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.286914 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.314133 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" event={"ID":"2e0ab3d0-1659-4b4f-878e-97fa8e1940e2","Type":"ContainerStarted","Data":"dffe4aa6bfb361df847aac7344bbfb162a84ef0c12301542099dcb4a332da22c"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.315024 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.316922 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" podStartSLOduration=126.316903146 podStartE2EDuration="2m6.316903146s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.315348611 +0000 UTC m=+147.742388381" watchObservedRunningTime="2025-12-03 12:07:30.316903146 +0000 UTC m=+147.743942916" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.347892 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-868m7"] Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.347947 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" event={"ID":"f46fb0dc-2855-48f1-b744-1b5cc24f00e9","Type":"ContainerStarted","Data":"fe7717003749f70b41f005603fdd7ecfcb29c3e19cbdc8894d912d7eda648f4e"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.357818 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.358928 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.858910403 +0000 UTC m=+148.285950173 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.365893 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2wq84" podStartSLOduration=126.365873989 podStartE2EDuration="2m6.365873989s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.347031439 +0000 UTC m=+147.774071209" watchObservedRunningTime="2025-12-03 12:07:30.365873989 +0000 UTC m=+147.792913759" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.373892 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-64znj" event={"ID":"830deeec-973c-4826-9357-b341f6a4b399","Type":"ContainerStarted","Data":"225897a1be300c22288ae9bcbc74e516b7138e083aa5e42f2fba5fbd80a0ecc9"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.385697 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" event={"ID":"4e92b06f-86a5-4db3-a4ea-81b914d29d9d","Type":"ContainerStarted","Data":"776570ea9e7720adf36f0b9942d6e0e48830d069a9c87a9eca464924bc64212e"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.392458 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-fp692"] Dec 03 12:07:30 crc kubenswrapper[4591]: W1203 12:07:30.406132 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode104273c_2ed5_4e1f_ae08_33412e0cfd00.slice/crio-a39255320d837c0cbbb007df1a1370b1676ffb249e663394ee8ab72990e2841d WatchSource:0}: Error finding container a39255320d837c0cbbb007df1a1370b1676ffb249e663394ee8ab72990e2841d: Status 404 returned error can't find the container with id a39255320d837c0cbbb007df1a1370b1676ffb249e663394ee8ab72990e2841d Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.406316 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-bvpwm" event={"ID":"d242eec9-e870-47a6-9eae-548e4634af2a","Type":"ContainerStarted","Data":"903a664ed82959f8e5057aabc809693b7f014e0f9b97fdb2d6f7c3d6943a3f3a"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.414611 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"53a7edd886d1cecca3c095f9c488574eb6ac037176f0e259dc60c3741a00be9a"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.426484 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8n547" event={"ID":"671f4fe9-fef4-40d1-9379-6d90d29320f4","Type":"ContainerStarted","Data":"309160ae6ac732d4fde8abfe7729e8be72cc048879742c747c6e567eccec14b1"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 
12:07:30.430418 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" event={"ID":"49493d9d-b35e-4a1e-8ecb-730ec9793700","Type":"ContainerStarted","Data":"ee16e5e3d68432393bd32b22313b410bfaa3ea13d04a0348b3c112cfab820d13"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.455761 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" event={"ID":"78feea03-d859-44c3-8832-a765ac762e2c","Type":"ContainerStarted","Data":"7b5538442fc2013dad9be6904d5426b857efabedf8b311d9137d57bb41801c1d"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.461988 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.463607 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:30.963590274 +0000 UTC m=+148.390630045 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.477889 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" event={"ID":"b655df3b-049f-4f7e-9f17-9d84bb008c0a","Type":"ContainerStarted","Data":"f560117b58ff95d28bc472f5eb4b26f4f337894e8f637710f44e140938efd698"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.478499 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-cnc8r" podStartSLOduration=126.478482247 podStartE2EDuration="2m6.478482247s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.476534866 +0000 UTC m=+147.903574636" watchObservedRunningTime="2025-12-03 12:07:30.478482247 +0000 UTC m=+147.905522017" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.504043 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" event={"ID":"068983ac-e598-4d3c-aafe-43d86b49bd4a","Type":"ContainerStarted","Data":"f416b83c1b282251c8ded2618a0e7917fd46e08f6f8542ed3881f12e2dea78ba"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.506215 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-dchgs" event={"ID":"047dfe24-1216-41aa-93be-7d434cb0fff6","Type":"ContainerStarted","Data":"f0c8148baf9c60ba44a2c4cee8ca974c912a0d00d2487bc6c202a85ab78a0cfa"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.516124 4591 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" event={"ID":"352d91de-d668-423d-af61-44fced909890","Type":"ContainerStarted","Data":"5a48d70b327ba7a9421de0d0df106daf1e688f306ceb077597d5801fe006f402"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.518925 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" event={"ID":"ab6e3a66-178d-4d51-b9f5-0ed128342a20","Type":"ContainerStarted","Data":"092864c2d987d7ca5814e92fd3901183d147e970a14c73661708bbcba2a0cb69"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.520774 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.522450 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" event={"ID":"8156e38b-b819-40eb-8022-403bd494a981","Type":"ContainerStarted","Data":"6e13237bc5bf03ae8e9981b8d41a71fb628595a810a0b346a606b8a86ebfef5c"} Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.526955 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.552550 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m7nk6" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.556693 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" podStartSLOduration=126.556682067 podStartE2EDuration="2m6.556682067s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.555802628 +0000 UTC m=+147.982842399" watchObservedRunningTime="2025-12-03 12:07:30.556682067 +0000 UTC m=+147.983721837" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.563618 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.564696 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.064674734 +0000 UTC m=+148.491714505 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.597843 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" podStartSLOduration=126.597830202 podStartE2EDuration="2m6.597830202s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.597607936 +0000 UTC m=+148.024647706" watchObservedRunningTime="2025-12-03 12:07:30.597830202 +0000 UTC m=+148.024869963" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.666412 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.687679 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.187643739 +0000 UTC m=+148.614683509 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.690474 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-xc9nk" podStartSLOduration=126.690445612 podStartE2EDuration="2m6.690445612s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.690324996 +0000 UTC m=+148.117364767" watchObservedRunningTime="2025-12-03 12:07:30.690445612 +0000 UTC m=+148.117485382" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.756546 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" podStartSLOduration=126.75652726 podStartE2EDuration="2m6.75652726s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.756363122 +0000 UTC m=+148.183402892" watchObservedRunningTime="2025-12-03 12:07:30.75652726 +0000 UTC m=+148.183567029" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.771441 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.771991 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.271969154 +0000 UTC m=+148.699008924 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.815495 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" podStartSLOduration=126.815473188 podStartE2EDuration="2m6.815473188s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.812389755 +0000 UTC m=+148.239429525" watchObservedRunningTime="2025-12-03 12:07:30.815473188 +0000 UTC m=+148.242512957" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.878905 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.879556 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.37954168 +0000 UTC m=+148.806581449 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.891688 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-n46k2" podStartSLOduration=126.891671453 podStartE2EDuration="2m6.891671453s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.859418868 +0000 UTC m=+148.286458638" watchObservedRunningTime="2025-12-03 12:07:30.891671453 +0000 UTC m=+148.318711223" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.971563 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" podStartSLOduration=126.971542638 podStartE2EDuration="2m6.971542638s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.925791892 +0000 UTC m=+148.352831662" watchObservedRunningTime="2025-12-03 12:07:30.971542638 +0000 UTC m=+148.398582408" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.971693 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-dchgs" podStartSLOduration=126.971687419 podStartE2EDuration="2m6.971687419s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:30.971310442 +0000 UTC m=+148.398350212" watchObservedRunningTime="2025-12-03 12:07:30.971687419 +0000 UTC m=+148.398727189" Dec 03 12:07:30 crc kubenswrapper[4591]: I1203 12:07:30.979747 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:30 crc kubenswrapper[4591]: E1203 12:07:30.980170 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.480156358 +0000 UTC m=+148.907196129 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.081789 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.082186 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.582174008 +0000 UTC m=+149.009213778 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.140453 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.145299 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:31 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:31 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:31 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.145340 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.182399 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.182841 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.682829503 +0000 UTC m=+149.109869273 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.285589 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.286377 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.786365249 +0000 UTC m=+149.213405020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.388584 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.388788 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.888742834 +0000 UTC m=+149.315782604 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.388881 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.389231 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.889222133 +0000 UTC m=+149.316261903 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.489783 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.490013 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.989983076 +0000 UTC m=+149.417022846 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.490235 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.490613 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:31.990605333 +0000 UTC m=+149.417645103 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.534662 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" event={"ID":"e72f6cd2-29a9-4921-bee2-229f9fd19774","Type":"ContainerStarted","Data":"2c7bce08fce1343e639d655dfc1ad8260ec91b0c3c880e29c80fd1a9087da9d8"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.535652 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.538870 4591 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-wfl6d container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" start-of-body= Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.538915 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" podUID="e72f6cd2-29a9-4921-bee2-229f9fd19774" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.549586 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" event={"ID":"f26d8d0d-6d96-432b-9921-ffa4af508729","Type":"ContainerStarted","Data":"2e20bef9127288e5871898a1f7df74d80dd70f6eb1075db57680c276fee8c47f"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.549640 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" 
event={"ID":"f26d8d0d-6d96-432b-9921-ffa4af508729","Type":"ContainerStarted","Data":"01b1bbc021e16c1dfd63a84f8963968073cade6082c4c4af271245a271971143"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.561017 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" podStartSLOduration=127.561003915 podStartE2EDuration="2m7.561003915s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.560085032 +0000 UTC m=+148.987124803" watchObservedRunningTime="2025-12-03 12:07:31.561003915 +0000 UTC m=+148.988043685" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.563708 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-czcvh" event={"ID":"f46fb0dc-2855-48f1-b744-1b5cc24f00e9","Type":"ContainerStarted","Data":"1c52b5ade801fbffb2f827fd76dca17c302ccb34ea3e503abaf169465ad9cc30"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.581279 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-dr86v" podStartSLOduration=126.581267541 podStartE2EDuration="2m6.581267541s" podCreationTimestamp="2025-12-03 12:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.57986952 +0000 UTC m=+149.006909289" watchObservedRunningTime="2025-12-03 12:07:31.581267541 +0000 UTC m=+149.008307310" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.587622 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b98hz" event={"ID":"49493d9d-b35e-4a1e-8ecb-730ec9793700","Type":"ContainerStarted","Data":"b7822263c0904f58825c2b56b0f4bd1f283043cbd144611449a2297a7fa70500"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.591575 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.591706 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.091688541 +0000 UTC m=+149.518728311 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.591836 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.592587 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.092575964 +0000 UTC m=+149.519615734 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.606431 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" event={"ID":"b655df3b-049f-4f7e-9f17-9d84bb008c0a","Type":"ContainerStarted","Data":"8904f3b7524328aa5540e53c8d187d5027b4c9959dfd883ab3236053a61c040f"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.614960 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-64znj" event={"ID":"830deeec-973c-4826-9357-b341f6a4b399","Type":"ContainerStarted","Data":"2c3e0c360adfcc6e1135e0bc7a50582bc8fe399a09fe57c71dea6ff69d3ee9dc"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.614992 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-64znj" event={"ID":"830deeec-973c-4826-9357-b341f6a4b399","Type":"ContainerStarted","Data":"e5cfe6f994ee1529b6e3ed56e631d88f7b99c6216ed15f99e4aaf23ab7c4b478"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.616731 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-fp692" event={"ID":"e43b9c0c-5961-4226-adf7-59660279b3a0","Type":"ContainerStarted","Data":"e82633ed7a1c51177144bb68a3d8af19574ec201c85167c0eb49eaabc74d9d95"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.616764 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-fp692" event={"ID":"e43b9c0c-5961-4226-adf7-59660279b3a0","Type":"ContainerStarted","Data":"7f0bceeead0a56139a6285feb48b008909e15f8b63670ca815f87c2317e96817"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.624478 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" 
event={"ID":"25566b9c-ba66-4db9-be25-b7cf8f913de6","Type":"ContainerStarted","Data":"7d9aa2b312dcbb8d77db141567c41ce6c4da99d117efe0e146cf1862cae9252f"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.624505 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" event={"ID":"25566b9c-ba66-4db9-be25-b7cf8f913de6","Type":"ContainerStarted","Data":"9733dfffc8249b314f0faf57bc19626aae995b2ec9b3e06f926be5417ce21543"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.648272 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-bvpwm" event={"ID":"d242eec9-e870-47a6-9eae-548e4634af2a","Type":"ContainerStarted","Data":"533f1a51d371e613d7653d76948bb34071a7527c4b9a006470a204b0344b5337"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.656438 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" event={"ID":"06a84866-8b8f-46f7-ae01-f3c5929a5814","Type":"ContainerStarted","Data":"f261bd7af4f91bb599839a316407a4969e3f5c24ef1ce35b1f0a71ca8c5e70db"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.656482 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" event={"ID":"06a84866-8b8f-46f7-ae01-f3c5929a5814","Type":"ContainerStarted","Data":"32d9d2adce20969b71841bdc090374b94e32b2b5eee1fc84c0c39fd82a8acbf3"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.656498 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.657738 4591 patch_prober.go:28] interesting pod/console-operator-58897d9998-xg6n2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.657786 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" podUID="06a84866-8b8f-46f7-ae01-f3c5929a5814" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.665771 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" event={"ID":"352d91de-d668-423d-af61-44fced909890","Type":"ContainerStarted","Data":"b6c996295b8fdbc73f0d78cee01f33d53a444cdbfd8e8e2bc11ce8f79cbd4d3d"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.681373 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-8d6cq" podStartSLOduration=127.681352888 podStartE2EDuration="2m7.681352888s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.648211715 +0000 UTC m=+149.075251485" watchObservedRunningTime="2025-12-03 12:07:31.681352888 +0000 UTC m=+149.108392657" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.682231 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" event={"ID":"39c241dd-677f-4270-b941-a71c3fab94d9","Type":"ContainerStarted","Data":"5187e229007db2e5d930d51d77e67eb6a40a53cd9a2e7f2c7af2c4d60463b2c0"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.693226 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-64znj" podStartSLOduration=127.693210691 podStartE2EDuration="2m7.693210691s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.680998923 +0000 UTC m=+149.108038694" watchObservedRunningTime="2025-12-03 12:07:31.693210691 +0000 UTC m=+149.120250461" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.695660 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mm725"] Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.693379 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.696643 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.693434 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.193421987 +0000 UTC m=+149.620461757 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.697148 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.699689 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" event={"ID":"4e92b06f-86a5-4db3-a4ea-81b914d29d9d","Type":"ContainerStarted","Data":"dc6519f2b2ff9d16ccd751d1e4282683909556c217bda9fbb3978e77492967e6"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.699724 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" event={"ID":"4e92b06f-86a5-4db3-a4ea-81b914d29d9d","Type":"ContainerStarted","Data":"2f5c980a11c233b024ecf5d763aee1322e3410bd1962850e06d6e24acdc3bd5b"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.704375 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.705000 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.204979869 +0000 UTC m=+149.632019639 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.717009 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" podStartSLOduration=127.716990681 podStartE2EDuration="2m7.716990681s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.713221542 +0000 UTC m=+149.140261313" watchObservedRunningTime="2025-12-03 12:07:31.716990681 +0000 UTC m=+149.144030451" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.733896 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm725"] Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.742519 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" event={"ID":"9f8ca899-dbb0-4e48-b783-3e4e8337505f","Type":"ContainerStarted","Data":"e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.743439 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.750953 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-bvpwm" podStartSLOduration=6.750937783 podStartE2EDuration="6.750937783s" podCreationTimestamp="2025-12-03 12:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.743659677 +0000 UTC m=+149.170699446" watchObservedRunningTime="2025-12-03 12:07:31.750937783 +0000 UTC m=+149.177977553" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.764647 4591 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-849sq container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.764704 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" podUID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.775587 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-868m7" event={"ID":"136811dd-de89-44c9-a2b2-01bdabecec20","Type":"ContainerStarted","Data":"7b81bf3a1f7928c965802edc0620c5f9eb646fd3d02ab625ff51df440dc9cbc7"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.775643 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress-canary/ingress-canary-868m7" event={"ID":"136811dd-de89-44c9-a2b2-01bdabecec20","Type":"ContainerStarted","Data":"2856328f3b21c7d8428fab262c80a93c57d2b6771b51f4004c6a69db38991f01"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.802514 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.802823 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr7gr\" (UniqueName: \"kubernetes.io/projected/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-kube-api-access-kr7gr\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.802844 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.302817361 +0000 UTC m=+149.729857130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.803153 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-utilities\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.803227 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-catalog-content\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.810260 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vbtjn" podStartSLOduration=127.810234518 podStartE2EDuration="2m7.810234518s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.79116439 +0000 UTC m=+149.218204160" watchObservedRunningTime="2025-12-03 12:07:31.810234518 +0000 UTC m=+149.237274289" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.819445 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" 
event={"ID":"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc","Type":"ContainerStarted","Data":"796f03f14d5db8cec627b4a8e9678052aed51ff110009ae37bce302ffc5662f0"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.819497 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" event={"ID":"2b68e7d7-8f51-434e-a8bc-e2f3e03a18cc","Type":"ContainerStarted","Data":"91f12d9d2e1dd20ba5649c3d7688b61d8be6b7ed35cda5db86fb2b966c8b4e91"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.839591 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" event={"ID":"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248","Type":"ContainerStarted","Data":"69ca320e76817b54afe366b8fc61e6feca9129ac08efa89085d0b80e2e7dd774"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.839638 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" event={"ID":"a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248","Type":"ContainerStarted","Data":"8df7c5632751c6a52572f0d4d31439463ae63dca42de7c7f3836e372ceb7c1fc"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.841593 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-fv4vx" podStartSLOduration=126.841575685 podStartE2EDuration="2m6.841575685s" podCreationTimestamp="2025-12-03 12:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.839965737 +0000 UTC m=+149.267005506" watchObservedRunningTime="2025-12-03 12:07:31.841575685 +0000 UTC m=+149.268615455" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.842790 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" podStartSLOduration=126.842782818 podStartE2EDuration="2m6.842782818s" podCreationTimestamp="2025-12-03 12:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.81922199 +0000 UTC m=+149.246261760" watchObservedRunningTime="2025-12-03 12:07:31.842782818 +0000 UTC m=+149.269822588" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.851001 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" event={"ID":"98a13519-a12b-4d1a-817e-aa1a67e8f289","Type":"ContainerStarted","Data":"e6fbf5a172aaa49b1fce117b9fa8dd4d9077a78effef0746a09f9179e719e3b8"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.851038 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" event={"ID":"98a13519-a12b-4d1a-817e-aa1a67e8f289","Type":"ContainerStarted","Data":"bc4a81c502b8a5ae3c3e1d4458bed25e85ef3a805fb4156bfa7bb6d0facaecc5"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.851050 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" event={"ID":"98a13519-a12b-4d1a-817e-aa1a67e8f289","Type":"ContainerStarted","Data":"2e2b3445a384618e09cfbfdc23a7160d4426a2df7da716f1d4e649b11d7f19d8"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.865295 4591 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-g9gz4" event={"ID":"068983ac-e598-4d3c-aafe-43d86b49bd4a","Type":"ContainerStarted","Data":"c62e8e73252254f828c0fe8bb1810bdca317c51b2e9372a80c870aa6ab1e983e"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.865547 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" podStartSLOduration=127.865536102 podStartE2EDuration="2m7.865536102s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.865299649 +0000 UTC m=+149.292339419" watchObservedRunningTime="2025-12-03 12:07:31.865536102 +0000 UTC m=+149.292575872" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.877901 4591 generic.go:334] "Generic (PLEG): container finished" podID="3efc6c62-2216-4e59-ac20-9aa55fbd8a38" containerID="dd088e65d87ecdf45022d7bc3a5fb81af96f0251a90d0dd51117a7bbdefebb26" exitCode=0 Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.877966 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" event={"ID":"3efc6c62-2216-4e59-ac20-9aa55fbd8a38","Type":"ContainerDied","Data":"dd088e65d87ecdf45022d7bc3a5fb81af96f0251a90d0dd51117a7bbdefebb26"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.898559 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-24xcm" event={"ID":"8156e38b-b819-40eb-8022-403bd494a981","Type":"ContainerStarted","Data":"c092bf703489b94dd1874f7fc678e48722a18cad7c9a5eda97121c8048807663"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.901439 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xzn8f"] Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.903721 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-utilities\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.903777 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-catalog-content\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.903825 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.903885 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr7gr\" (UniqueName: \"kubernetes.io/projected/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-kube-api-access-kr7gr\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " 
pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.906318 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.906474 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7ftqp" podStartSLOduration=127.906454657 podStartE2EDuration="2m7.906454657s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.904279579 +0000 UTC m=+149.331319349" watchObservedRunningTime="2025-12-03 12:07:31.906454657 +0000 UTC m=+149.333494428" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.907422 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-catalog-content\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: E1203 12:07:31.908098 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.408054006 +0000 UTC m=+149.835093776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.908815 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-utilities\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.909548 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.915097 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" event={"ID":"448fff50-3aec-4b69-8a4c-8d5862cb5fe1","Type":"ContainerStarted","Data":"1cad3f6adc8422b02626d53c6b6ae7dbb71578fab70483798ed8b6e11c014f88"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.915134 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" event={"ID":"448fff50-3aec-4b69-8a4c-8d5862cb5fe1","Type":"ContainerStarted","Data":"8b05d01499504619ec2ada8614135a851103ae4fb6e46c96f2ea70980d8b7ae7"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.938480 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xzn8f"] Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 
12:07:31.942994 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr7gr\" (UniqueName: \"kubernetes.io/projected/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-kube-api-access-kr7gr\") pod \"certified-operators-mm725\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.950908 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gdw9n" event={"ID":"741b2760-bc39-47b7-9a01-59c46af6e440","Type":"ContainerStarted","Data":"5e7cdf21695c40b17e2fc5c24681615847c170d793d9ab75caec6c330cdb1869"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.950939 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gdw9n" event={"ID":"741b2760-bc39-47b7-9a01-59c46af6e440","Type":"ContainerStarted","Data":"203c27aa2ad155fee2556aa51b1a4c6d9e854bd182ff855197650c933e54fe7b"} Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.951516 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-gdw9n" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.958344 4591 patch_prober.go:28] interesting pod/downloads-7954f5f757-gdw9n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.958655 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gdw9n" podUID="741b2760-bc39-47b7-9a01-59c46af6e440" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.959732 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-868m7" podStartSLOduration=6.959721065 podStartE2EDuration="6.959721065s" podCreationTimestamp="2025-12-03 12:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.925120648 +0000 UTC m=+149.352160417" watchObservedRunningTime="2025-12-03 12:07:31.959721065 +0000 UTC m=+149.386760835" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.967582 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" podStartSLOduration=127.967572047 podStartE2EDuration="2m7.967572047s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.942367747 +0000 UTC m=+149.369407516" watchObservedRunningTime="2025-12-03 12:07:31.967572047 +0000 UTC m=+149.394611816" Dec 03 12:07:31 crc kubenswrapper[4591]: I1203 12:07:31.979717 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" event={"ID":"bff50f24-9c88-4db0-bf44-e9a5a4b36431","Type":"ContainerStarted","Data":"8e6583589a8a6c2b3f64dd35060c15d11b805cd4844fb7fb896c137ca6d9ed34"} Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.005524 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-kwvmh" podStartSLOduration=128.005501808 podStartE2EDuration="2m8.005501808s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:31.991721708 +0000 UTC m=+149.418761479" watchObservedRunningTime="2025-12-03 12:07:32.005501808 +0000 UTC m=+149.432541577" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.007517 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.007932 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r72q\" (UniqueName: \"kubernetes.io/projected/83de920c-74b3-4ab0-bdbc-71c95d354fc7-kube-api-access-4r72q\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.007970 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-utilities\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.007994 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-catalog-content\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.008810 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.508793208 +0000 UTC m=+149.935832978 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.015628 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0bed0126af2a3d7290982cb2b2925f241b1bfb8309c07dd8c9e4965d4d17dbd2"} Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.019826 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.078364 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" event={"ID":"e15736ce-de7b-465b-8150-e52bfe141765","Type":"ContainerStarted","Data":"2cb3954254afa9622586f03425cad3160b6fd7dc046d215c3c4e75cf59bfbf0d"} Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.078426 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" event={"ID":"e15736ce-de7b-465b-8150-e52bfe141765","Type":"ContainerStarted","Data":"2de60f815b2723f72a0136c548ee619eb4ca64f03a8d92c02c74e0b6c1140363"} Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.078440 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" event={"ID":"e15736ce-de7b-465b-8150-e52bfe141765","Type":"ContainerStarted","Data":"acf1ff469a9058bcb5b1f2ab06be10e01f1aa98189315d813fa963d9c8f01a91"} Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.079761 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6x4k7" podStartSLOduration=128.079742283 podStartE2EDuration="2m8.079742283s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:32.079431991 +0000 UTC m=+149.506471761" watchObservedRunningTime="2025-12-03 12:07:32.079742283 +0000 UTC m=+149.506782053" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.079886 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qlgfr" podStartSLOduration=128.079881644 podStartE2EDuration="2m8.079881644s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:32.046169522 +0000 UTC m=+149.473209292" watchObservedRunningTime="2025-12-03 12:07:32.079881644 +0000 UTC m=+149.506921424" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.097940 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" event={"ID":"e104273c-2ed5-4e1f-ae08-33412e0cfd00","Type":"ContainerStarted","Data":"91b3715c79bc651e0026d3eb1dec1976b81c5ae61ef29bca2505ceae13b83ff5"} Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.097970 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" event={"ID":"e104273c-2ed5-4e1f-ae08-33412e0cfd00","Type":"ContainerStarted","Data":"a39255320d837c0cbbb007df1a1370b1676ffb249e663394ee8ab72990e2841d"} Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.099910 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zmn84"] Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.101191 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.109043 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r72q\" (UniqueName: \"kubernetes.io/projected/83de920c-74b3-4ab0-bdbc-71c95d354fc7-kube-api-access-4r72q\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.109088 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-utilities\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.109108 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-catalog-content\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.109161 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.109428 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.609417065 +0000 UTC m=+150.036456835 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.109921 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-utilities\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.110182 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-catalog-content\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.111035 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pjthj" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.117358 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zmn84"] Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.133177 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r72q\" (UniqueName: \"kubernetes.io/projected/83de920c-74b3-4ab0-bdbc-71c95d354fc7-kube-api-access-4r72q\") pod \"community-operators-xzn8f\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.147208 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:32 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:32 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:32 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.147245 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.203596 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-gdw9n" podStartSLOduration=128.20357663 podStartE2EDuration="2m8.20357663s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:32.176252485 +0000 UTC m=+149.603292255" watchObservedRunningTime="2025-12-03 12:07:32.20357663 +0000 UTC m=+149.630616400" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.210502 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.210784 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-catalog-content\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.210976 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-utilities\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.211370 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fb6z\" (UniqueName: \"kubernetes.io/projected/10cc9749-3923-4002-a8eb-39bdbe040385-kube-api-access-4fb6z\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.212102 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.712082138 +0000 UTC m=+150.139121898 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.260380 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.287531 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d26b4" podStartSLOduration=128.287484412 podStartE2EDuration="2m8.287484412s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:32.268685102 +0000 UTC m=+149.695724873" watchObservedRunningTime="2025-12-03 12:07:32.287484412 +0000 UTC m=+149.714524183" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.292892 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c8mct"] Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.294528 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.298030 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8mct"] Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.298626 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zntmm" podStartSLOduration=128.298602791 podStartE2EDuration="2m8.298602791s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:32.292809657 +0000 UTC m=+149.719849417" watchObservedRunningTime="2025-12-03 12:07:32.298602791 +0000 UTC m=+149.725642560" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.312661 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fb6z\" (UniqueName: \"kubernetes.io/projected/10cc9749-3923-4002-a8eb-39bdbe040385-kube-api-access-4fb6z\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.312727 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-catalog-content\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.312758 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.312799 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-utilities\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.314653 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.81463443 +0000 UTC m=+150.241674200 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.315776 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-catalog-content\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.320808 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-utilities\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.335047 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fb6z\" (UniqueName: \"kubernetes.io/projected/10cc9749-3923-4002-a8eb-39bdbe040385-kube-api-access-4fb6z\") pod \"certified-operators-zmn84\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.413981 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.414238 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-utilities\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.414274 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.914247802 +0000 UTC m=+150.341287572 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.414314 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-catalog-content\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.414400 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.414509 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2fng\" (UniqueName: \"kubernetes.io/projected/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-kube-api-access-t2fng\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.414802 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:32.914793565 +0000 UTC m=+150.341833335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.446286 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.468776 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm725"] Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.492272 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.492324 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.505942 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.516379 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.516783 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2fng\" (UniqueName: \"kubernetes.io/projected/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-kube-api-access-t2fng\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.516819 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-utilities\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.516849 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-catalog-content\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.517221 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-catalog-content\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.517294 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.017281577 +0000 UTC m=+150.444321347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.517716 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-utilities\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.553995 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2fng\" (UniqueName: \"kubernetes.io/projected/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-kube-api-access-t2fng\") pod \"community-operators-c8mct\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.617683 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.618152 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.118139793 +0000 UTC m=+150.545179563 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.636674 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xzn8f"] Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.639273 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:07:32 crc kubenswrapper[4591]: W1203 12:07:32.698128 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83de920c_74b3_4ab0_bdbc_71c95d354fc7.slice/crio-1e53d257185fff0c8d85c6594efdb1844e5bdb46a92b437507f350d28f3004b7 WatchSource:0}: Error finding container 1e53d257185fff0c8d85c6594efdb1844e5bdb46a92b437507f350d28f3004b7: Status 404 returned error can't find the container with id 1e53d257185fff0c8d85c6594efdb1844e5bdb46a92b437507f350d28f3004b7 Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.720320 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.720603 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.220590524 +0000 UTC m=+150.647630294 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.821408 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.821822 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.321809757 +0000 UTC m=+150.748849527 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.919104 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zmn84"] Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.923194 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.923757 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.423738249 +0000 UTC m=+150.850778019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: I1203 12:07:32.924003 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:32 crc kubenswrapper[4591]: E1203 12:07:32.924383 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.424371627 +0000 UTC m=+150.851411396 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:32 crc kubenswrapper[4591]: W1203 12:07:32.992028 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10cc9749_3923_4002_a8eb_39bdbe040385.slice/crio-daaf24509f28683d2116ddde7a692d52a920262f7fe09b0337ed5fb0a8baa591 WatchSource:0}: Error finding container daaf24509f28683d2116ddde7a692d52a920262f7fe09b0337ed5fb0a8baa591: Status 404 returned error can't find the container with id daaf24509f28683d2116ddde7a692d52a920262f7fe09b0337ed5fb0a8baa591 Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.029289 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8mct"] Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.029616 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.030085 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.530048968 +0000 UTC m=+150.957088739 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: W1203 12:07:33.045687 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod001a9fff_2a3d_47fe_b28b_1c93bf0122e0.slice/crio-5ea401d20dbf03253d84cfcf5dbb6ef25a85852af793785ba26701f6b58a92ca WatchSource:0}: Error finding container 5ea401d20dbf03253d84cfcf5dbb6ef25a85852af793785ba26701f6b58a92ca: Status 404 returned error can't find the container with id 5ea401d20dbf03253d84cfcf5dbb6ef25a85852af793785ba26701f6b58a92ca Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.112340 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-fp692" event={"ID":"e43b9c0c-5961-4226-adf7-59660279b3a0","Type":"ContainerStarted","Data":"d83bae64dc6e113d908b9efac5ba09443554c16babfd6a7681bfae640c876c09"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.113101 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-fp692" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.126103 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" event={"ID":"3efc6c62-2216-4e59-ac20-9aa55fbd8a38","Type":"ContainerStarted","Data":"896786d52ab6b3eac810e31a9f06ea73cd5294940d905684d247f5e48c5f8776"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.126175 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" event={"ID":"3efc6c62-2216-4e59-ac20-9aa55fbd8a38","Type":"ContainerStarted","Data":"287e0ce4030782c4a06e0c93f4cb5f7e2605c8d2d70cc4325644dfab1b41f893"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.128676 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8mct" event={"ID":"001a9fff-2a3d-47fe-b28b-1c93bf0122e0","Type":"ContainerStarted","Data":"5ea401d20dbf03253d84cfcf5dbb6ef25a85852af793785ba26701f6b58a92ca"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.131027 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-fp692" podStartSLOduration=8.131006842 podStartE2EDuration="8.131006842s" podCreationTimestamp="2025-12-03 12:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:33.130491194 +0000 UTC m=+150.557530964" watchObservedRunningTime="2025-12-03 12:07:33.131006842 +0000 UTC m=+150.558046611" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.131311 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.131838 4591 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.631821248 +0000 UTC m=+151.058861017 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.138601 4591 generic.go:334] "Generic (PLEG): container finished" podID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerID="9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a" exitCode=0 Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.138729 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm725" event={"ID":"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d","Type":"ContainerDied","Data":"9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.138822 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm725" event={"ID":"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d","Type":"ContainerStarted","Data":"28489d4487009cb6577e2dd090cd1aa27ffe67e26c2102a787c1d89251976092"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.146047 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:33 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:33 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:33 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.146114 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.149082 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmn84" event={"ID":"10cc9749-3923-4002-a8eb-39bdbe040385","Type":"ContainerStarted","Data":"daaf24509f28683d2116ddde7a692d52a920262f7fe09b0337ed5fb0a8baa591"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.149955 4591 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.163409 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" podStartSLOduration=129.163393558 podStartE2EDuration="2m9.163393558s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:33.160774547 +0000 UTC m=+150.587814317" watchObservedRunningTime="2025-12-03 12:07:33.163393558 +0000 UTC m=+150.590433328" Dec 03 12:07:33 crc 
kubenswrapper[4591]: I1203 12:07:33.164486 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" event={"ID":"bff50f24-9c88-4db0-bf44-e9a5a4b36431","Type":"ContainerStarted","Data":"ad4683f1a86b45c0a24d0775d1b7bbc10dcf1039e3a594b8b1f213104a89414a"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.166372 4591 generic.go:334] "Generic (PLEG): container finished" podID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerID="a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9" exitCode=0 Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.167224 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzn8f" event={"ID":"83de920c-74b3-4ab0-bdbc-71c95d354fc7","Type":"ContainerDied","Data":"a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.167242 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzn8f" event={"ID":"83de920c-74b3-4ab0-bdbc-71c95d354fc7","Type":"ContainerStarted","Data":"1e53d257185fff0c8d85c6594efdb1844e5bdb46a92b437507f350d28f3004b7"} Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.172805 4591 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-849sq container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.172922 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" podUID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.173847 4591 patch_prober.go:28] interesting pod/downloads-7954f5f757-gdw9n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.174005 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gdw9n" podUID="741b2760-bc39-47b7-9a01-59c46af6e440" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.193157 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6fvx9" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.193205 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.193219 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.195130 4591 patch_prober.go:28] interesting pod/apiserver-76f77b778f-kdctl container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.16:8443/livez\": dial tcp 10.217.0.16:8443: connect: connection refused" 
start-of-body= Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.198036 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" podUID="3efc6c62-2216-4e59-ac20-9aa55fbd8a38" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.16:8443/livez\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.197281 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-wfl6d" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.200698 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-xg6n2" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.232277 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.234846 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.734828273 +0000 UTC m=+151.161868042 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.342828 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.343247 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.843234221 +0000 UTC m=+151.270273991 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.445845 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.446308 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:33.946290158 +0000 UTC m=+151.373329928 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.548707 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.549472 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.049448617 +0000 UTC m=+151.476488387 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.618594 4591 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.650613 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.650792 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.150763768 +0000 UTC m=+151.577803538 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.650897 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.651226 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.151213261 +0000 UTC m=+151.578253031 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.752598 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.752826 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.2527876 +0000 UTC m=+151.679827370 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.753398 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.753894 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.253867675 +0000 UTC m=+151.680907445 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.854953 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.855046 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.355026975 +0000 UTC m=+151.782066745 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.855502 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.855913 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.35590408 +0000 UTC m=+151.782943850 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.887328 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kjc6n"] Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.888820 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.894160 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.911801 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kjc6n"] Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.959517 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.959676 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.459648466 +0000 UTC m=+151.886688237 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.960080 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-catalog-content\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.960154 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.960218 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-utilities\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:33 crc kubenswrapper[4591]: I1203 12:07:33.960257 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fngb2\" (UniqueName: \"kubernetes.io/projected/7b035511-6f69-41ee-b874-77ae32b9a25b-kube-api-access-fngb2\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:33 crc kubenswrapper[4591]: E1203 12:07:33.960639 4591 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:07:34.460612394 +0000 UTC m=+151.887652165 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7qzlf" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.001495 4591 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T12:07:33.618625367Z","Handler":null,"Name":""} Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.013469 4591 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.013527 4591 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.061640 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.062109 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-catalog-content\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.062282 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-utilities\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.062354 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fngb2\" (UniqueName: \"kubernetes.io/projected/7b035511-6f69-41ee-b874-77ae32b9a25b-kube-api-access-fngb2\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.063315 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-catalog-content\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.063382 4591 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-utilities\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.085725 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.091937 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fngb2\" (UniqueName: \"kubernetes.io/projected/7b035511-6f69-41ee-b874-77ae32b9a25b-kube-api-access-fngb2\") pod \"redhat-marketplace-kjc6n\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.141730 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:34 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:34 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:34 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.141778 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.163195 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.169363 4591 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.169407 4591 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.191637 4591 generic.go:334] "Generic (PLEG): container finished" podID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerID="f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9" exitCode=0 Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.191765 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8mct" event={"ID":"001a9fff-2a3d-47fe-b28b-1c93bf0122e0","Type":"ContainerDied","Data":"f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9"} Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.195039 4591 generic.go:334] "Generic (PLEG): container finished" podID="10cc9749-3923-4002-a8eb-39bdbe040385" containerID="cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af" exitCode=0 Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.195086 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmn84" event={"ID":"10cc9749-3923-4002-a8eb-39bdbe040385","Type":"ContainerDied","Data":"cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af"} Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.206390 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.238931 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" event={"ID":"bff50f24-9c88-4db0-bf44-e9a5a4b36431","Type":"ContainerStarted","Data":"25752b89625f8323ee8d0d7c66c50ac84299458472c019c379fdaf1d48b6cd7a"} Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.238992 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" event={"ID":"bff50f24-9c88-4db0-bf44-e9a5a4b36431","Type":"ContainerStarted","Data":"7847268b572d46fdbeaa9bdd22f137e6ffe2cf52afca2c5847e65b2a29701ed1"} Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.239004 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" event={"ID":"bff50f24-9c88-4db0-bf44-e9a5a4b36431","Type":"ContainerStarted","Data":"39ee097def40b3e3df4d10019b4e21c40ab7761935cde556900ca5d5a3e026ee"} Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.239815 4591 patch_prober.go:28] interesting pod/downloads-7954f5f757-gdw9n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.239865 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gdw9n" podUID="741b2760-bc39-47b7-9a01-59c46af6e440" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.243428 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.289184 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j2dg6"] Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.290367 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.303511 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2dg6"] Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.307388 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-q2xzp" podStartSLOduration=9.307374546 podStartE2EDuration="9.307374546s" podCreationTimestamp="2025-12-03 12:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:34.306642222 +0000 UTC m=+151.733681992" watchObservedRunningTime="2025-12-03 12:07:34.307374546 +0000 UTC m=+151.734414316" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.325774 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7qzlf\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.367292 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-utilities\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.367375 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xrtg\" (UniqueName: \"kubernetes.io/projected/e30ae515-5cf5-42d5-a315-75399fa0eade-kube-api-access-7xrtg\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.368019 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-catalog-content\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.470802 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-utilities\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.470857 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xrtg\" (UniqueName: \"kubernetes.io/projected/e30ae515-5cf5-42d5-a315-75399fa0eade-kube-api-access-7xrtg\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.470883 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-catalog-content\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.471289 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-catalog-content\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.473093 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-utilities\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.496744 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xrtg\" (UniqueName: \"kubernetes.io/projected/e30ae515-5cf5-42d5-a315-75399fa0eade-kube-api-access-7xrtg\") pod \"redhat-marketplace-j2dg6\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.538327 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kjc6n"] Dec 03 12:07:34 crc kubenswrapper[4591]: W1203 12:07:34.557206 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b035511_6f69_41ee_b874_77ae32b9a25b.slice/crio-c58fd8e3bc2718011f28cc3d55e63e43e13b9a93c0964768958f73513146a030 WatchSource:0}: Error finding container c58fd8e3bc2718011f28cc3d55e63e43e13b9a93c0964768958f73513146a030: Status 404 returned error can't find the container with id c58fd8e3bc2718011f28cc3d55e63e43e13b9a93c0964768958f73513146a030 Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.579318 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.631028 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.807055 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7qzlf"] Dec 03 12:07:34 crc kubenswrapper[4591]: W1203 12:07:34.824873 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a403cb5_eebc_4d01_990a_a3bbc24c2bfa.slice/crio-b2896ba38b9ac19ac5dedb86b43365df4d7558d599e10b46dfb649bce58d9fa1 WatchSource:0}: Error finding container b2896ba38b9ac19ac5dedb86b43365df4d7558d599e10b46dfb649bce58d9fa1: Status 404 returned error can't find the container with id b2896ba38b9ac19ac5dedb86b43365df4d7558d599e10b46dfb649bce58d9fa1 Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.878869 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p2zbm"] Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.879929 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.883494 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.888945 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p2zbm"] Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.900299 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.977865 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdmdw\" (UniqueName: \"kubernetes.io/projected/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-kube-api-access-pdmdw\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.977925 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-utilities\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:34 crc kubenswrapper[4591]: I1203 12:07:34.977997 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-catalog-content\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.044372 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2dg6"] Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.078732 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-catalog-content\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.078814 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdmdw\" (UniqueName: \"kubernetes.io/projected/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-kube-api-access-pdmdw\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.078845 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-utilities\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.079240 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-catalog-content\") pod \"redhat-operators-p2zbm\" (UID: 
\"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.079615 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-utilities\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.104093 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdmdw\" (UniqueName: \"kubernetes.io/projected/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-kube-api-access-pdmdw\") pod \"redhat-operators-p2zbm\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.147459 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:35 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:35 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:35 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.147515 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.217905 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.254288 4591 generic.go:334] "Generic (PLEG): container finished" podID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerID="01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5" exitCode=0 Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.254406 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2dg6" event={"ID":"e30ae515-5cf5-42d5-a315-75399fa0eade","Type":"ContainerDied","Data":"01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5"} Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.254448 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2dg6" event={"ID":"e30ae515-5cf5-42d5-a315-75399fa0eade","Type":"ContainerStarted","Data":"ef772411d50cac23eb7b3402ced50bb071447e7ebf6971228b58f8a8e1cba762"} Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.261153 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" event={"ID":"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa","Type":"ContainerStarted","Data":"eefda4ed8f40179d744f8bab21a8dbca4029918d9108977d8dd123e4623f11ff"} Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.261199 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" event={"ID":"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa","Type":"ContainerStarted","Data":"b2896ba38b9ac19ac5dedb86b43365df4d7558d599e10b46dfb649bce58d9fa1"} Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.261217 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.277610 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g7k2p"] Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.280918 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.285106 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g7k2p"] Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.296773 4591 generic.go:334] "Generic (PLEG): container finished" podID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerID="907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b" exitCode=0 Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.297743 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kjc6n" event={"ID":"7b035511-6f69-41ee-b874-77ae32b9a25b","Type":"ContainerDied","Data":"907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b"} Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.297786 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kjc6n" event={"ID":"7b035511-6f69-41ee-b874-77ae32b9a25b","Type":"ContainerStarted","Data":"c58fd8e3bc2718011f28cc3d55e63e43e13b9a93c0964768958f73513146a030"} Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.308120 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" podStartSLOduration=131.308107344 podStartE2EDuration="2m11.308107344s" podCreationTimestamp="2025-12-03 12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:35.291629577 +0000 UTC m=+152.718669357" watchObservedRunningTime="2025-12-03 12:07:35.308107344 +0000 UTC m=+152.735147114" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.388008 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-catalog-content\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.388308 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsjnb\" (UniqueName: \"kubernetes.io/projected/7ce86335-21dd-42f4-afbe-2adc9ceee368-kube-api-access-bsjnb\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.388401 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-utilities\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.483190 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p2zbm"] Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.490170 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-catalog-content\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc 
kubenswrapper[4591]: I1203 12:07:35.490772 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsjnb\" (UniqueName: \"kubernetes.io/projected/7ce86335-21dd-42f4-afbe-2adc9ceee368-kube-api-access-bsjnb\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.490941 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-utilities\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.491040 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-catalog-content\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.491268 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-utilities\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: W1203 12:07:35.502922 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cff9cfc_1bad_4d45_a4ca_97c08dbcd7d0.slice/crio-52fda760a33b2a8a210ae12837b9d15e22f49064203361f4434a2dce69b0df3c WatchSource:0}: Error finding container 52fda760a33b2a8a210ae12837b9d15e22f49064203361f4434a2dce69b0df3c: Status 404 returned error can't find the container with id 52fda760a33b2a8a210ae12837b9d15e22f49064203361f4434a2dce69b0df3c Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.507106 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsjnb\" (UniqueName: \"kubernetes.io/projected/7ce86335-21dd-42f4-afbe-2adc9ceee368-kube-api-access-bsjnb\") pod \"redhat-operators-g7k2p\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.599842 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:07:35 crc kubenswrapper[4591]: I1203 12:07:35.852956 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g7k2p"] Dec 03 12:07:35 crc kubenswrapper[4591]: W1203 12:07:35.884530 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ce86335_21dd_42f4_afbe_2adc9ceee368.slice/crio-1ef32c3a437866d4798d406b98946e9e5e5545bb9ff7d1b5d40a58a7ffddfa47 WatchSource:0}: Error finding container 1ef32c3a437866d4798d406b98946e9e5e5545bb9ff7d1b5d40a58a7ffddfa47: Status 404 returned error can't find the container with id 1ef32c3a437866d4798d406b98946e9e5e5545bb9ff7d1b5d40a58a7ffddfa47 Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.143110 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:36 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:36 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:36 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.143378 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.304149 4591 generic.go:334] "Generic (PLEG): container finished" podID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerID="8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447" exitCode=0 Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.304210 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g7k2p" event={"ID":"7ce86335-21dd-42f4-afbe-2adc9ceee368","Type":"ContainerDied","Data":"8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447"} Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.304237 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g7k2p" event={"ID":"7ce86335-21dd-42f4-afbe-2adc9ceee368","Type":"ContainerStarted","Data":"1ef32c3a437866d4798d406b98946e9e5e5545bb9ff7d1b5d40a58a7ffddfa47"} Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.310920 4591 generic.go:334] "Generic (PLEG): container finished" podID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerID="70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb" exitCode=0 Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.311753 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p2zbm" event={"ID":"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0","Type":"ContainerDied","Data":"70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb"} Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.311779 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p2zbm" event={"ID":"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0","Type":"ContainerStarted","Data":"52fda760a33b2a8a210ae12837b9d15e22f49064203361f4434a2dce69b0df3c"} Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.381954 4591 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.382618 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.384364 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.387789 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.402497 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.512498 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/603c7b88-46c6-45bb-85fc-9e29637e2b38-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.512572 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/603c7b88-46c6-45bb-85fc-9e29637e2b38-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.614817 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/603c7b88-46c6-45bb-85fc-9e29637e2b38-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.614884 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/603c7b88-46c6-45bb-85fc-9e29637e2b38-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.614989 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/603c7b88-46c6-45bb-85fc-9e29637e2b38-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.633971 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/603c7b88-46c6-45bb-85fc-9e29637e2b38-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:36 crc kubenswrapper[4591]: I1203 12:07:36.703297 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.109285 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.141868 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:37 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:37 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:37 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.141936 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.320548 4591 generic.go:334] "Generic (PLEG): container finished" podID="25566b9c-ba66-4db9-be25-b7cf8f913de6" containerID="7d9aa2b312dcbb8d77db141567c41ce6c4da99d117efe0e146cf1862cae9252f" exitCode=0 Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.320603 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" event={"ID":"25566b9c-ba66-4db9-be25-b7cf8f913de6","Type":"ContainerDied","Data":"7d9aa2b312dcbb8d77db141567c41ce6c4da99d117efe0e146cf1862cae9252f"} Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.321862 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"603c7b88-46c6-45bb-85fc-9e29637e2b38","Type":"ContainerStarted","Data":"42d6b6cbbbe0b63658954ca3ac3d656c9a53aaa183060080fcde9883273b18ba"} Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.641220 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.643006 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.643757 4591 patch_prober.go:28] interesting pod/console-f9d7485db-cnc8r container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Dec 03 12:07:37 crc kubenswrapper[4591]: I1203 12:07:37.643805 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cnc8r" podUID="3d1889fc-cf0c-4114-8653-a7b95c23bdd5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.139969 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.142736 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" 
start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:38 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:38 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:38 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.142810 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.210866 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.223858 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-kdctl" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.347825 4591 generic.go:334] "Generic (PLEG): container finished" podID="603c7b88-46c6-45bb-85fc-9e29637e2b38" containerID="015e57f40d37c9b60d5e6636111a968b2ab59a0626ddaf3ebd6f61e8696aceab" exitCode=0 Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.347915 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"603c7b88-46c6-45bb-85fc-9e29637e2b38","Type":"ContainerDied","Data":"015e57f40d37c9b60d5e6636111a968b2ab59a0626ddaf3ebd6f61e8696aceab"} Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.423732 4591 patch_prober.go:28] interesting pod/downloads-7954f5f757-gdw9n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.423773 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gdw9n" podUID="741b2760-bc39-47b7-9a01-59c46af6e440" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.423853 4591 patch_prober.go:28] interesting pod/downloads-7954f5f757-gdw9n container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.423900 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-gdw9n" podUID="741b2760-bc39-47b7-9a01-59c46af6e440" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.622167 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.623236 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.628799 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.647192 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.651304 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.757645 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c544070-1f06-475b-8f4c-376d1aca2878-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.757797 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c544070-1f06-475b-8f4c-376d1aca2878-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.858970 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c544070-1f06-475b-8f4c-376d1aca2878-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.859074 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c544070-1f06-475b-8f4c-376d1aca2878-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.859230 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c544070-1f06-475b-8f4c-376d1aca2878-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.884147 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c544070-1f06-475b-8f4c-376d1aca2878-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:38 crc kubenswrapper[4591]: I1203 12:07:38.942029 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:39 crc kubenswrapper[4591]: I1203 12:07:39.143598 4591 patch_prober.go:28] interesting pod/router-default-5444994796-dchgs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:07:39 crc kubenswrapper[4591]: [-]has-synced failed: reason withheld Dec 03 12:07:39 crc kubenswrapper[4591]: [+]process-running ok Dec 03 12:07:39 crc kubenswrapper[4591]: healthz check failed Dec 03 12:07:39 crc kubenswrapper[4591]: I1203 12:07:39.143662 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dchgs" podUID="047dfe24-1216-41aa-93be-7d434cb0fff6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:07:40 crc kubenswrapper[4591]: I1203 12:07:40.147586 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:40 crc kubenswrapper[4591]: I1203 12:07:40.161295 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-dchgs" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.003977 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.024021 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.130348 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25566b9c-ba66-4db9-be25-b7cf8f913de6-config-volume\") pod \"25566b9c-ba66-4db9-be25-b7cf8f913de6\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.130396 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tnbn\" (UniqueName: \"kubernetes.io/projected/25566b9c-ba66-4db9-be25-b7cf8f913de6-kube-api-access-5tnbn\") pod \"25566b9c-ba66-4db9-be25-b7cf8f913de6\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.130447 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/603c7b88-46c6-45bb-85fc-9e29637e2b38-kube-api-access\") pod \"603c7b88-46c6-45bb-85fc-9e29637e2b38\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.130465 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/603c7b88-46c6-45bb-85fc-9e29637e2b38-kubelet-dir\") pod \"603c7b88-46c6-45bb-85fc-9e29637e2b38\" (UID: \"603c7b88-46c6-45bb-85fc-9e29637e2b38\") " Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.130531 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25566b9c-ba66-4db9-be25-b7cf8f913de6-secret-volume\") pod \"25566b9c-ba66-4db9-be25-b7cf8f913de6\" (UID: \"25566b9c-ba66-4db9-be25-b7cf8f913de6\") " Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 
12:07:43.131156 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/603c7b88-46c6-45bb-85fc-9e29637e2b38-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "603c7b88-46c6-45bb-85fc-9e29637e2b38" (UID: "603c7b88-46c6-45bb-85fc-9e29637e2b38"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.131904 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25566b9c-ba66-4db9-be25-b7cf8f913de6-config-volume" (OuterVolumeSpecName: "config-volume") pod "25566b9c-ba66-4db9-be25-b7cf8f913de6" (UID: "25566b9c-ba66-4db9-be25-b7cf8f913de6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.135942 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/603c7b88-46c6-45bb-85fc-9e29637e2b38-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "603c7b88-46c6-45bb-85fc-9e29637e2b38" (UID: "603c7b88-46c6-45bb-85fc-9e29637e2b38"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.137278 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25566b9c-ba66-4db9-be25-b7cf8f913de6-kube-api-access-5tnbn" (OuterVolumeSpecName: "kube-api-access-5tnbn") pod "25566b9c-ba66-4db9-be25-b7cf8f913de6" (UID: "25566b9c-ba66-4db9-be25-b7cf8f913de6"). InnerVolumeSpecName "kube-api-access-5tnbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.145557 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25566b9c-ba66-4db9-be25-b7cf8f913de6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "25566b9c-ba66-4db9-be25-b7cf8f913de6" (UID: "25566b9c-ba66-4db9-be25-b7cf8f913de6"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.232803 4591 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25566b9c-ba66-4db9-be25-b7cf8f913de6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.232836 4591 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25566b9c-ba66-4db9-be25-b7cf8f913de6-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.232847 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tnbn\" (UniqueName: \"kubernetes.io/projected/25566b9c-ba66-4db9-be25-b7cf8f913de6-kube-api-access-5tnbn\") on node \"crc\" DevicePath \"\"" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.232857 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/603c7b88-46c6-45bb-85fc-9e29637e2b38-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.232874 4591 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/603c7b88-46c6-45bb-85fc-9e29637e2b38-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.420697 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" event={"ID":"25566b9c-ba66-4db9-be25-b7cf8f913de6","Type":"ContainerDied","Data":"9733dfffc8249b314f0faf57bc19626aae995b2ec9b3e06f926be5417ce21543"} Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.420721 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412720-cxwmp" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.420746 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9733dfffc8249b314f0faf57bc19626aae995b2ec9b3e06f926be5417ce21543" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.422934 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"603c7b88-46c6-45bb-85fc-9e29637e2b38","Type":"ContainerDied","Data":"42d6b6cbbbe0b63658954ca3ac3d656c9a53aaa183060080fcde9883273b18ba"} Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.423006 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42d6b6cbbbe0b63658954ca3ac3d656c9a53aaa183060080fcde9883273b18ba" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.423087 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:07:43 crc kubenswrapper[4591]: I1203 12:07:43.730526 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-fp692" Dec 03 12:07:44 crc kubenswrapper[4591]: I1203 12:07:44.155966 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 12:07:44 crc kubenswrapper[4591]: W1203 12:07:44.174274 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod6c544070_1f06_475b_8f4c_376d1aca2878.slice/crio-0d539b74cc60a64576d7eeee4d928a81f8d52c243802e2333829b5b3440fefe5 WatchSource:0}: Error finding container 0d539b74cc60a64576d7eeee4d928a81f8d52c243802e2333829b5b3440fefe5: Status 404 returned error can't find the container with id 0d539b74cc60a64576d7eeee4d928a81f8d52c243802e2333829b5b3440fefe5 Dec 03 12:07:44 crc kubenswrapper[4591]: I1203 12:07:44.448102 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6c544070-1f06-475b-8f4c-376d1aca2878","Type":"ContainerStarted","Data":"0d539b74cc60a64576d7eeee4d928a81f8d52c243802e2333829b5b3440fefe5"} Dec 03 12:07:45 crc kubenswrapper[4591]: I1203 12:07:45.457438 4591 generic.go:334] "Generic (PLEG): container finished" podID="6c544070-1f06-475b-8f4c-376d1aca2878" containerID="7658f3391a2054065e18db74f378bcf6efde49f902bdd71632f160b6978a6649" exitCode=0 Dec 03 12:07:45 crc kubenswrapper[4591]: I1203 12:07:45.457485 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6c544070-1f06-475b-8f4c-376d1aca2878","Type":"ContainerDied","Data":"7658f3391a2054065e18db74f378bcf6efde49f902bdd71632f160b6978a6649"} Dec 03 12:07:46 crc kubenswrapper[4591]: I1203 12:07:46.400515 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:46 crc kubenswrapper[4591]: I1203 12:07:46.417002 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8cd44649-dee5-4a99-8123-059f30fd0c1b-metrics-certs\") pod \"network-metrics-daemon-5drvq\" (UID: \"8cd44649-dee5-4a99-8123-059f30fd0c1b\") " pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:46 crc kubenswrapper[4591]: I1203 12:07:46.504126 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5drvq" Dec 03 12:07:47 crc kubenswrapper[4591]: I1203 12:07:47.645150 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:47 crc kubenswrapper[4591]: I1203 12:07:47.649684 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.422092 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-gdw9n" Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.529313 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.626258 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c544070-1f06-475b-8f4c-376d1aca2878-kube-api-access\") pod \"6c544070-1f06-475b-8f4c-376d1aca2878\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.626423 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c544070-1f06-475b-8f4c-376d1aca2878-kubelet-dir\") pod \"6c544070-1f06-475b-8f4c-376d1aca2878\" (UID: \"6c544070-1f06-475b-8f4c-376d1aca2878\") " Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.626475 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c544070-1f06-475b-8f4c-376d1aca2878-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6c544070-1f06-475b-8f4c-376d1aca2878" (UID: "6c544070-1f06-475b-8f4c-376d1aca2878"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.626788 4591 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c544070-1f06-475b-8f4c-376d1aca2878-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.631076 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c544070-1f06-475b-8f4c-376d1aca2878-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6c544070-1f06-475b-8f4c-376d1aca2878" (UID: "6c544070-1f06-475b-8f4c-376d1aca2878"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:07:48 crc kubenswrapper[4591]: I1203 12:07:48.728403 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c544070-1f06-475b-8f4c-376d1aca2878-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:07:49 crc kubenswrapper[4591]: I1203 12:07:49.480009 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6c544070-1f06-475b-8f4c-376d1aca2878","Type":"ContainerDied","Data":"0d539b74cc60a64576d7eeee4d928a81f8d52c243802e2333829b5b3440fefe5"} Dec 03 12:07:49 crc kubenswrapper[4591]: I1203 12:07:49.480052 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d539b74cc60a64576d7eeee4d928a81f8d52c243802e2333829b5b3440fefe5" Dec 03 12:07:49 crc kubenswrapper[4591]: I1203 12:07:49.480094 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:07:54 crc kubenswrapper[4591]: I1203 12:07:54.586337 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:07:54 crc kubenswrapper[4591]: I1203 12:07:54.938945 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5drvq"] Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.299516 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.299573 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.520388 4591 generic.go:334] "Generic (PLEG): container finished" podID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerID="4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd" exitCode=0 Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.520470 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kjc6n" event={"ID":"7b035511-6f69-41ee-b874-77ae32b9a25b","Type":"ContainerDied","Data":"4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.522238 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5drvq" event={"ID":"8cd44649-dee5-4a99-8123-059f30fd0c1b","Type":"ContainerStarted","Data":"22336130f90b3c09383f21e129c286de41084a7a351cb9c0321a1983c65a0498"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.522264 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5drvq" event={"ID":"8cd44649-dee5-4a99-8123-059f30fd0c1b","Type":"ContainerStarted","Data":"65d040057368b3120de3204f6693e6ae95ee7d21c5fa9f499ec6e22e4f1acc2b"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.526772 4591 generic.go:334] "Generic (PLEG): container finished" podID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerID="772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3" exitCode=0 Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.526836 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p2zbm" event={"ID":"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0","Type":"ContainerDied","Data":"772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.528802 4591 generic.go:334] "Generic (PLEG): container finished" podID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerID="0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3" exitCode=0 Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.528885 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8mct" 
event={"ID":"001a9fff-2a3d-47fe-b28b-1c93bf0122e0","Type":"ContainerDied","Data":"0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.532247 4591 generic.go:334] "Generic (PLEG): container finished" podID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerID="1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd" exitCode=0 Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.532301 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm725" event={"ID":"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d","Type":"ContainerDied","Data":"1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.537742 4591 generic.go:334] "Generic (PLEG): container finished" podID="10cc9749-3923-4002-a8eb-39bdbe040385" containerID="52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f" exitCode=0 Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.537794 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmn84" event={"ID":"10cc9749-3923-4002-a8eb-39bdbe040385","Type":"ContainerDied","Data":"52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.542147 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g7k2p" event={"ID":"7ce86335-21dd-42f4-afbe-2adc9ceee368","Type":"ContainerStarted","Data":"672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.544721 4591 generic.go:334] "Generic (PLEG): container finished" podID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerID="46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d" exitCode=0 Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.544768 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzn8f" event={"ID":"83de920c-74b3-4ab0-bdbc-71c95d354fc7","Type":"ContainerDied","Data":"46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d"} Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.550927 4591 generic.go:334] "Generic (PLEG): container finished" podID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerID="4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3" exitCode=0 Dec 03 12:07:55 crc kubenswrapper[4591]: I1203 12:07:55.551089 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2dg6" event={"ID":"e30ae515-5cf5-42d5-a315-75399fa0eade","Type":"ContainerDied","Data":"4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.560244 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8mct" event={"ID":"001a9fff-2a3d-47fe-b28b-1c93bf0122e0","Type":"ContainerStarted","Data":"dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.562416 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm725" event={"ID":"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d","Type":"ContainerStarted","Data":"b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.564321 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/network-metrics-daemon-5drvq" event={"ID":"8cd44649-dee5-4a99-8123-059f30fd0c1b","Type":"ContainerStarted","Data":"880885725d5ebca5bf2202e8588aaac16de3a8130ee293139079446d9a267797"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.566606 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzn8f" event={"ID":"83de920c-74b3-4ab0-bdbc-71c95d354fc7","Type":"ContainerStarted","Data":"2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.568496 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2dg6" event={"ID":"e30ae515-5cf5-42d5-a315-75399fa0eade","Type":"ContainerStarted","Data":"bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.570874 4591 generic.go:334] "Generic (PLEG): container finished" podID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerID="672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5" exitCode=0 Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.570940 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g7k2p" event={"ID":"7ce86335-21dd-42f4-afbe-2adc9ceee368","Type":"ContainerDied","Data":"672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.573586 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmn84" event={"ID":"10cc9749-3923-4002-a8eb-39bdbe040385","Type":"ContainerStarted","Data":"6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.576497 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kjc6n" event={"ID":"7b035511-6f69-41ee-b874-77ae32b9a25b","Type":"ContainerStarted","Data":"18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.578603 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p2zbm" event={"ID":"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0","Type":"ContainerStarted","Data":"52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342"} Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.587267 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c8mct" podStartSLOduration=2.749916269 podStartE2EDuration="24.587248145s" podCreationTimestamp="2025-12-03 12:07:32 +0000 UTC" firstStartedPulling="2025-12-03 12:07:34.193887801 +0000 UTC m=+151.620927571" lastFinishedPulling="2025-12-03 12:07:56.031219677 +0000 UTC m=+173.458259447" observedRunningTime="2025-12-03 12:07:56.587106159 +0000 UTC m=+174.014145929" watchObservedRunningTime="2025-12-03 12:07:56.587248145 +0000 UTC m=+174.014287916" Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.618096 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zmn84" podStartSLOduration=2.791517986 podStartE2EDuration="24.618057566s" podCreationTimestamp="2025-12-03 12:07:32 +0000 UTC" firstStartedPulling="2025-12-03 12:07:34.197364039 +0000 UTC m=+151.624403809" lastFinishedPulling="2025-12-03 12:07:56.023903619 +0000 UTC m=+173.450943389" observedRunningTime="2025-12-03 12:07:56.603716455 +0000 UTC 
m=+174.030756225" watchObservedRunningTime="2025-12-03 12:07:56.618057566 +0000 UTC m=+174.045097336" Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.632315 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j2dg6" podStartSLOduration=1.850675574 podStartE2EDuration="22.632297376s" podCreationTimestamp="2025-12-03 12:07:34 +0000 UTC" firstStartedPulling="2025-12-03 12:07:35.257726797 +0000 UTC m=+152.684766567" lastFinishedPulling="2025-12-03 12:07:56.039348599 +0000 UTC m=+173.466388369" observedRunningTime="2025-12-03 12:07:56.630506408 +0000 UTC m=+174.057546178" watchObservedRunningTime="2025-12-03 12:07:56.632297376 +0000 UTC m=+174.059337146" Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.647511 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p2zbm" podStartSLOduration=2.950116985 podStartE2EDuration="22.647493489s" podCreationTimestamp="2025-12-03 12:07:34 +0000 UTC" firstStartedPulling="2025-12-03 12:07:36.312972951 +0000 UTC m=+153.740012720" lastFinishedPulling="2025-12-03 12:07:56.010349454 +0000 UTC m=+173.437389224" observedRunningTime="2025-12-03 12:07:56.646699552 +0000 UTC m=+174.073739321" watchObservedRunningTime="2025-12-03 12:07:56.647493489 +0000 UTC m=+174.074533249" Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.667663 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kjc6n" podStartSLOduration=2.881760371 podStartE2EDuration="23.667645827s" podCreationTimestamp="2025-12-03 12:07:33 +0000 UTC" firstStartedPulling="2025-12-03 12:07:35.303706002 +0000 UTC m=+152.730745772" lastFinishedPulling="2025-12-03 12:07:56.089591458 +0000 UTC m=+173.516631228" observedRunningTime="2025-12-03 12:07:56.664190558 +0000 UTC m=+174.091230328" watchObservedRunningTime="2025-12-03 12:07:56.667645827 +0000 UTC m=+174.094685596" Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.680343 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mm725" podStartSLOduration=2.698717946 podStartE2EDuration="25.680320273s" podCreationTimestamp="2025-12-03 12:07:31 +0000 UTC" firstStartedPulling="2025-12-03 12:07:33.141157114 +0000 UTC m=+150.568196883" lastFinishedPulling="2025-12-03 12:07:56.12275944 +0000 UTC m=+173.549799210" observedRunningTime="2025-12-03 12:07:56.679129119 +0000 UTC m=+174.106168888" watchObservedRunningTime="2025-12-03 12:07:56.680320273 +0000 UTC m=+174.107360042" Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.695582 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xzn8f" podStartSLOduration=2.855569483 podStartE2EDuration="25.695565268s" podCreationTimestamp="2025-12-03 12:07:31 +0000 UTC" firstStartedPulling="2025-12-03 12:07:33.19340385 +0000 UTC m=+150.620443620" lastFinishedPulling="2025-12-03 12:07:56.033399644 +0000 UTC m=+173.460439405" observedRunningTime="2025-12-03 12:07:56.692385284 +0000 UTC m=+174.119425054" watchObservedRunningTime="2025-12-03 12:07:56.695565268 +0000 UTC m=+174.122605038" Dec 03 12:07:56 crc kubenswrapper[4591]: I1203 12:07:56.706663 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-5drvq" podStartSLOduration=152.706654661 podStartE2EDuration="2m32.706654661s" podCreationTimestamp="2025-12-03 
12:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:07:56.705269112 +0000 UTC m=+174.132308882" watchObservedRunningTime="2025-12-03 12:07:56.706654661 +0000 UTC m=+174.133694431" Dec 03 12:07:57 crc kubenswrapper[4591]: I1203 12:07:57.587179 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g7k2p" event={"ID":"7ce86335-21dd-42f4-afbe-2adc9ceee368","Type":"ContainerStarted","Data":"96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5"} Dec 03 12:07:57 crc kubenswrapper[4591]: I1203 12:07:57.603977 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g7k2p" podStartSLOduration=1.8886398039999999 podStartE2EDuration="22.60395669s" podCreationTimestamp="2025-12-03 12:07:35 +0000 UTC" firstStartedPulling="2025-12-03 12:07:36.305448212 +0000 UTC m=+153.732487982" lastFinishedPulling="2025-12-03 12:07:57.020765098 +0000 UTC m=+174.447804868" observedRunningTime="2025-12-03 12:07:57.600919264 +0000 UTC m=+175.027959034" watchObservedRunningTime="2025-12-03 12:07:57.60395669 +0000 UTC m=+175.030996460" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.021113 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.021651 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.096266 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.261271 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.261346 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.293783 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.447825 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.447888 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.480596 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.640345 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.640402 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.673718 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:08:02 crc 
kubenswrapper[4591]: I1203 12:08:02.673883 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.674085 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:08:02 crc kubenswrapper[4591]: I1203 12:08:02.676814 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:08:03 crc kubenswrapper[4591]: I1203 12:08:03.678956 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.206900 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.206965 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.242440 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.368384 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zmn84"] Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.632286 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.632355 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.652508 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zmn84" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="registry-server" containerID="cri-o://6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182" gracePeriod=2 Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.685554 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.688401 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:08:04 crc kubenswrapper[4591]: I1203 12:08:04.729080 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.060128 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.165898 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-utilities\") pod \"10cc9749-3923-4002-a8eb-39bdbe040385\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.166035 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-catalog-content\") pod \"10cc9749-3923-4002-a8eb-39bdbe040385\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.166196 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fb6z\" (UniqueName: \"kubernetes.io/projected/10cc9749-3923-4002-a8eb-39bdbe040385-kube-api-access-4fb6z\") pod \"10cc9749-3923-4002-a8eb-39bdbe040385\" (UID: \"10cc9749-3923-4002-a8eb-39bdbe040385\") " Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.166913 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-utilities" (OuterVolumeSpecName: "utilities") pod "10cc9749-3923-4002-a8eb-39bdbe040385" (UID: "10cc9749-3923-4002-a8eb-39bdbe040385"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.172651 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10cc9749-3923-4002-a8eb-39bdbe040385-kube-api-access-4fb6z" (OuterVolumeSpecName: "kube-api-access-4fb6z") pod "10cc9749-3923-4002-a8eb-39bdbe040385" (UID: "10cc9749-3923-4002-a8eb-39bdbe040385"). InnerVolumeSpecName "kube-api-access-4fb6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.204735 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10cc9749-3923-4002-a8eb-39bdbe040385" (UID: "10cc9749-3923-4002-a8eb-39bdbe040385"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.218388 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.218502 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.256838 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.267686 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.267715 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10cc9749-3923-4002-a8eb-39bdbe040385-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.267731 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fb6z\" (UniqueName: \"kubernetes.io/projected/10cc9749-3923-4002-a8eb-39bdbe040385-kube-api-access-4fb6z\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.349922 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8mct"] Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.600459 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.600495 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.637280 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.662247 4591 generic.go:334] "Generic (PLEG): container finished" podID="10cc9749-3923-4002-a8eb-39bdbe040385" containerID="6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182" exitCode=0 Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.662387 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmn84" event={"ID":"10cc9749-3923-4002-a8eb-39bdbe040385","Type":"ContainerDied","Data":"6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182"} Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.662468 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmn84" event={"ID":"10cc9749-3923-4002-a8eb-39bdbe040385","Type":"ContainerDied","Data":"daaf24509f28683d2116ddde7a692d52a920262f7fe09b0337ed5fb0a8baa591"} Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.662502 4591 scope.go:117] "RemoveContainer" containerID="6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.662423 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zmn84" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.663584 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c8mct" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="registry-server" containerID="cri-o://dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8" gracePeriod=2 Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.683452 4591 scope.go:117] "RemoveContainer" containerID="52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.692934 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zmn84"] Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.701580 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zmn84"] Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.707550 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.709565 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.711336 4591 scope.go:117] "RemoveContainer" containerID="cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.776031 4591 scope.go:117] "RemoveContainer" containerID="6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182" Dec 03 12:08:05 crc kubenswrapper[4591]: E1203 12:08:05.776602 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182\": container with ID starting with 6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182 not found: ID does not exist" containerID="6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.776652 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182"} err="failed to get container status \"6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182\": rpc error: code = NotFound desc = could not find container \"6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182\": container with ID starting with 6916b43b7e4071c23a0b378a5172cd055966c2bfa7defb00d44d358cd7057182 not found: ID does not exist" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.776702 4591 scope.go:117] "RemoveContainer" containerID="52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f" Dec 03 12:08:05 crc kubenswrapper[4591]: E1203 12:08:05.777007 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f\": container with ID starting with 52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f not found: ID does not exist" containerID="52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.777042 4591 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f"} err="failed to get container status \"52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f\": rpc error: code = NotFound desc = could not find container \"52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f\": container with ID starting with 52fea31b50ecac2de261371b1d02456f611938b343304de28447a53cf51b930f not found: ID does not exist" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.777156 4591 scope.go:117] "RemoveContainer" containerID="cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af" Dec 03 12:08:05 crc kubenswrapper[4591]: E1203 12:08:05.777427 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af\": container with ID starting with cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af not found: ID does not exist" containerID="cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af" Dec 03 12:08:05 crc kubenswrapper[4591]: I1203 12:08:05.777450 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af"} err="failed to get container status \"cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af\": rpc error: code = NotFound desc = could not find container \"cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af\": container with ID starting with cf075ae23cb377760f24826bc829fd40f7849fedf8afbd553360819ab2d006af not found: ID does not exist" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.077960 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.132669 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tr2zc"] Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.177993 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-utilities\") pod \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.178181 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-catalog-content\") pod \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.178213 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2fng\" (UniqueName: \"kubernetes.io/projected/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-kube-api-access-t2fng\") pod \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\" (UID: \"001a9fff-2a3d-47fe-b28b-1c93bf0122e0\") " Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.178628 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-utilities" (OuterVolumeSpecName: "utilities") pod "001a9fff-2a3d-47fe-b28b-1c93bf0122e0" (UID: "001a9fff-2a3d-47fe-b28b-1c93bf0122e0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.184256 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-kube-api-access-t2fng" (OuterVolumeSpecName: "kube-api-access-t2fng") pod "001a9fff-2a3d-47fe-b28b-1c93bf0122e0" (UID: "001a9fff-2a3d-47fe-b28b-1c93bf0122e0"). InnerVolumeSpecName "kube-api-access-t2fng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.219432 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "001a9fff-2a3d-47fe-b28b-1c93bf0122e0" (UID: "001a9fff-2a3d-47fe-b28b-1c93bf0122e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.280091 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.280121 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.280131 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2fng\" (UniqueName: \"kubernetes.io/projected/001a9fff-2a3d-47fe-b28b-1c93bf0122e0-kube-api-access-t2fng\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.671627 4591 generic.go:334] "Generic (PLEG): container finished" podID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerID="dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8" exitCode=0 Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.671722 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8mct" event={"ID":"001a9fff-2a3d-47fe-b28b-1c93bf0122e0","Type":"ContainerDied","Data":"dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8"} Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.671742 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c8mct" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.672137 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8mct" event={"ID":"001a9fff-2a3d-47fe-b28b-1c93bf0122e0","Type":"ContainerDied","Data":"5ea401d20dbf03253d84cfcf5dbb6ef25a85852af793785ba26701f6b58a92ca"} Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.672176 4591 scope.go:117] "RemoveContainer" containerID="dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.705340 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8mct"] Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.714661 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c8mct"] Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.715623 4591 scope.go:117] "RemoveContainer" containerID="0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.733750 4591 scope.go:117] "RemoveContainer" containerID="f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.751692 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2dg6"] Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.752007 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j2dg6" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerName="registry-server" containerID="cri-o://bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3" gracePeriod=2 Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.755395 4591 scope.go:117] "RemoveContainer" containerID="dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8" Dec 03 12:08:06 crc kubenswrapper[4591]: E1203 12:08:06.755874 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8\": container with ID starting with dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8 not found: ID does not exist" containerID="dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.755907 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8"} err="failed to get container status \"dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8\": rpc error: code = NotFound desc = could not find container \"dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8\": container with ID starting with dd535cc8393673b8bcd35c2982bc157e33133ac483862881104d8d5adefd5ba8 not found: ID does not exist" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.755930 4591 scope.go:117] "RemoveContainer" containerID="0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3" Dec 03 12:08:06 crc kubenswrapper[4591]: E1203 12:08:06.756332 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3\": container with ID starting with 
0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3 not found: ID does not exist" containerID="0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.756354 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3"} err="failed to get container status \"0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3\": rpc error: code = NotFound desc = could not find container \"0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3\": container with ID starting with 0bc0e7ef533d8b920f5ed6df4dbda73400d2e06846ea7136905c9d3bcac829b3 not found: ID does not exist" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.756369 4591 scope.go:117] "RemoveContainer" containerID="f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9" Dec 03 12:08:06 crc kubenswrapper[4591]: E1203 12:08:06.756557 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9\": container with ID starting with f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9 not found: ID does not exist" containerID="f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.756579 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9"} err="failed to get container status \"f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9\": rpc error: code = NotFound desc = could not find container \"f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9\": container with ID starting with f114b6ece098c8dea42acd3caf582f3618180c202d7a8c8cbcc959a83ef424f9 not found: ID does not exist" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.917523 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" path="/var/lib/kubelet/pods/001a9fff-2a3d-47fe-b28b-1c93bf0122e0/volumes" Dec 03 12:08:06 crc kubenswrapper[4591]: I1203 12:08:06.919390 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" path="/var/lib/kubelet/pods/10cc9749-3923-4002-a8eb-39bdbe040385/volumes" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.191771 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.293398 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-utilities\") pod \"e30ae515-5cf5-42d5-a315-75399fa0eade\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.293563 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-catalog-content\") pod \"e30ae515-5cf5-42d5-a315-75399fa0eade\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.293635 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xrtg\" (UniqueName: \"kubernetes.io/projected/e30ae515-5cf5-42d5-a315-75399fa0eade-kube-api-access-7xrtg\") pod \"e30ae515-5cf5-42d5-a315-75399fa0eade\" (UID: \"e30ae515-5cf5-42d5-a315-75399fa0eade\") " Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.294245 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-utilities" (OuterVolumeSpecName: "utilities") pod "e30ae515-5cf5-42d5-a315-75399fa0eade" (UID: "e30ae515-5cf5-42d5-a315-75399fa0eade"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.298242 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e30ae515-5cf5-42d5-a315-75399fa0eade-kube-api-access-7xrtg" (OuterVolumeSpecName: "kube-api-access-7xrtg") pod "e30ae515-5cf5-42d5-a315-75399fa0eade" (UID: "e30ae515-5cf5-42d5-a315-75399fa0eade"). InnerVolumeSpecName "kube-api-access-7xrtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.310295 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e30ae515-5cf5-42d5-a315-75399fa0eade" (UID: "e30ae515-5cf5-42d5-a315-75399fa0eade"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.395948 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.395984 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xrtg\" (UniqueName: \"kubernetes.io/projected/e30ae515-5cf5-42d5-a315-75399fa0eade-kube-api-access-7xrtg\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.395999 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30ae515-5cf5-42d5-a315-75399fa0eade-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.683865 4591 generic.go:334] "Generic (PLEG): container finished" podID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerID="bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3" exitCode=0 Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.683942 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2dg6" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.683950 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2dg6" event={"ID":"e30ae515-5cf5-42d5-a315-75399fa0eade","Type":"ContainerDied","Data":"bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3"} Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.684017 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2dg6" event={"ID":"e30ae515-5cf5-42d5-a315-75399fa0eade","Type":"ContainerDied","Data":"ef772411d50cac23eb7b3402ced50bb071447e7ebf6971228b58f8a8e1cba762"} Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.684047 4591 scope.go:117] "RemoveContainer" containerID="bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.693049 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-9vfpm" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.701320 4591 scope.go:117] "RemoveContainer" containerID="4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.731848 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2dg6"] Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.731952 4591 scope.go:117] "RemoveContainer" containerID="01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.735341 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2dg6"] Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.744595 4591 scope.go:117] "RemoveContainer" containerID="bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3" Dec 03 12:08:07 crc kubenswrapper[4591]: E1203 12:08:07.745021 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3\": container with ID starting with 
bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3 not found: ID does not exist" containerID="bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.745056 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3"} err="failed to get container status \"bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3\": rpc error: code = NotFound desc = could not find container \"bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3\": container with ID starting with bddb77edb0124d121dc7c27c18aed9102bf3c164840d480544637edacabbdea3 not found: ID does not exist" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.745102 4591 scope.go:117] "RemoveContainer" containerID="4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3" Dec 03 12:08:07 crc kubenswrapper[4591]: E1203 12:08:07.745548 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3\": container with ID starting with 4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3 not found: ID does not exist" containerID="4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.745593 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3"} err="failed to get container status \"4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3\": rpc error: code = NotFound desc = could not find container \"4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3\": container with ID starting with 4a5d232b1ffd305cfc2f106098c1a939c194f9cf08ac785244ded12d70c967f3 not found: ID does not exist" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.745631 4591 scope.go:117] "RemoveContainer" containerID="01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5" Dec 03 12:08:07 crc kubenswrapper[4591]: E1203 12:08:07.745985 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5\": container with ID starting with 01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5 not found: ID does not exist" containerID="01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5" Dec 03 12:08:07 crc kubenswrapper[4591]: I1203 12:08:07.746033 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5"} err="failed to get container status \"01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5\": rpc error: code = NotFound desc = could not find container \"01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5\": container with ID starting with 01b130028ed71e835d2e5a1f1fc729e2e18436d60c223105140582971e7116e5 not found: ID does not exist" Dec 03 12:08:08 crc kubenswrapper[4591]: I1203 12:08:08.016900 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:08:08 crc kubenswrapper[4591]: I1203 12:08:08.896943 4591 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" path="/var/lib/kubelet/pods/e30ae515-5cf5-42d5-a315-75399fa0eade/volumes" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.148712 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g7k2p"] Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.148981 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g7k2p" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="registry-server" containerID="cri-o://96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5" gracePeriod=2 Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.597498 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.705638 4591 generic.go:334] "Generic (PLEG): container finished" podID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerID="96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5" exitCode=0 Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.705689 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g7k2p" event={"ID":"7ce86335-21dd-42f4-afbe-2adc9ceee368","Type":"ContainerDied","Data":"96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5"} Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.705715 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g7k2p" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.705730 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g7k2p" event={"ID":"7ce86335-21dd-42f4-afbe-2adc9ceee368","Type":"ContainerDied","Data":"1ef32c3a437866d4798d406b98946e9e5e5545bb9ff7d1b5d40a58a7ffddfa47"} Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.705753 4591 scope.go:117] "RemoveContainer" containerID="96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.720849 4591 scope.go:117] "RemoveContainer" containerID="672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.730612 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-catalog-content\") pod \"7ce86335-21dd-42f4-afbe-2adc9ceee368\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.730662 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-utilities\") pod \"7ce86335-21dd-42f4-afbe-2adc9ceee368\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.730719 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsjnb\" (UniqueName: \"kubernetes.io/projected/7ce86335-21dd-42f4-afbe-2adc9ceee368-kube-api-access-bsjnb\") pod \"7ce86335-21dd-42f4-afbe-2adc9ceee368\" (UID: \"7ce86335-21dd-42f4-afbe-2adc9ceee368\") " Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.732642 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-utilities" (OuterVolumeSpecName: "utilities") pod "7ce86335-21dd-42f4-afbe-2adc9ceee368" (UID: "7ce86335-21dd-42f4-afbe-2adc9ceee368"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.736634 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ce86335-21dd-42f4-afbe-2adc9ceee368-kube-api-access-bsjnb" (OuterVolumeSpecName: "kube-api-access-bsjnb") pod "7ce86335-21dd-42f4-afbe-2adc9ceee368" (UID: "7ce86335-21dd-42f4-afbe-2adc9ceee368"). InnerVolumeSpecName "kube-api-access-bsjnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.741999 4591 scope.go:117] "RemoveContainer" containerID="8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.758915 4591 scope.go:117] "RemoveContainer" containerID="96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5" Dec 03 12:08:09 crc kubenswrapper[4591]: E1203 12:08:09.759499 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5\": container with ID starting with 96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5 not found: ID does not exist" containerID="96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.759542 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5"} err="failed to get container status \"96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5\": rpc error: code = NotFound desc = could not find container \"96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5\": container with ID starting with 96d7169b7601b1dde24e3c272d6fa7d8f046b47e66e6c774f3eb028425fc9ab5 not found: ID does not exist" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.759568 4591 scope.go:117] "RemoveContainer" containerID="672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5" Dec 03 12:08:09 crc kubenswrapper[4591]: E1203 12:08:09.760078 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5\": container with ID starting with 672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5 not found: ID does not exist" containerID="672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.760107 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5"} err="failed to get container status \"672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5\": rpc error: code = NotFound desc = could not find container \"672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5\": container with ID starting with 672a977378cf3cf675f4057ed0bbc209f1ac5ba6b507e43fd2361513782558d5 not found: ID does not exist" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.760129 4591 scope.go:117] "RemoveContainer" 
containerID="8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447" Dec 03 12:08:09 crc kubenswrapper[4591]: E1203 12:08:09.760458 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447\": container with ID starting with 8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447 not found: ID does not exist" containerID="8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.760479 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447"} err="failed to get container status \"8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447\": rpc error: code = NotFound desc = could not find container \"8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447\": container with ID starting with 8ca24c477bd064fa657bc83104e998efe0e6066f93b23e7e2d7ccc921efa5447 not found: ID does not exist" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.810789 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7ce86335-21dd-42f4-afbe-2adc9ceee368" (UID: "7ce86335-21dd-42f4-afbe-2adc9ceee368"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.832379 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.832405 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ce86335-21dd-42f4-afbe-2adc9ceee368-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:09 crc kubenswrapper[4591]: I1203 12:08:09.832415 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsjnb\" (UniqueName: \"kubernetes.io/projected/7ce86335-21dd-42f4-afbe-2adc9ceee368-kube-api-access-bsjnb\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:10 crc kubenswrapper[4591]: I1203 12:08:10.038185 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g7k2p"] Dec 03 12:08:10 crc kubenswrapper[4591]: I1203 12:08:10.040941 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g7k2p"] Dec 03 12:08:10 crc kubenswrapper[4591]: I1203 12:08:10.899417 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" path="/var/lib/kubelet/pods/7ce86335-21dd-42f4-afbe-2adc9ceee368/volumes" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216399 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216848 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="extract-content" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216861 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="extract-content" Dec 03 
12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216873 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216879 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216886 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25566b9c-ba66-4db9-be25-b7cf8f913de6" containerName="collect-profiles" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216892 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="25566b9c-ba66-4db9-be25-b7cf8f913de6" containerName="collect-profiles" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216899 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216904 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216911 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216916 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216924 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="extract-content" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216929 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="extract-content" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216935 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216941 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216950 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216955 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216961 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c544070-1f06-475b-8f4c-376d1aca2878" containerName="pruner" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216967 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c544070-1f06-475b-8f4c-376d1aca2878" containerName="pruner" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216985 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerName="extract-content" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.216991 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" 
containerName="extract-content" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.216999 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217005 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.217014 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="603c7b88-46c6-45bb-85fc-9e29637e2b38" containerName="pruner" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217019 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="603c7b88-46c6-45bb-85fc-9e29637e2b38" containerName="pruner" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.217029 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217035 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.217044 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="extract-content" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217049 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="extract-content" Dec 03 12:08:13 crc kubenswrapper[4591]: E1203 12:08:13.217096 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217102 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="extract-utilities" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217194 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="001a9fff-2a3d-47fe-b28b-1c93bf0122e0" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217205 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c544070-1f06-475b-8f4c-376d1aca2878" containerName="pruner" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217212 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce86335-21dd-42f4-afbe-2adc9ceee368" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217222 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="10cc9749-3923-4002-a8eb-39bdbe040385" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217230 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="e30ae515-5cf5-42d5-a315-75399fa0eade" containerName="registry-server" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217237 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="25566b9c-ba66-4db9-be25-b7cf8f913de6" containerName="collect-profiles" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217246 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="603c7b88-46c6-45bb-85fc-9e29637e2b38" containerName="pruner" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.217626 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.222659 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.222774 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.232742 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.371830 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8cc20ef8-69e7-466e-bb17-671d725ae36e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.372093 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8cc20ef8-69e7-466e-bb17-671d725ae36e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.473854 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8cc20ef8-69e7-466e-bb17-671d725ae36e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.473947 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8cc20ef8-69e7-466e-bb17-671d725ae36e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.474075 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8cc20ef8-69e7-466e-bb17-671d725ae36e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.493226 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8cc20ef8-69e7-466e-bb17-671d725ae36e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.530942 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:13 crc kubenswrapper[4591]: I1203 12:08:13.897189 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 12:08:14 crc kubenswrapper[4591]: I1203 12:08:14.754010 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8cc20ef8-69e7-466e-bb17-671d725ae36e","Type":"ContainerStarted","Data":"6a1084ba9d7e70ba83ae87c40d4dd95f64eaf5d1dcf2387f91662cdb12b43f47"} Dec 03 12:08:14 crc kubenswrapper[4591]: I1203 12:08:14.754400 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8cc20ef8-69e7-466e-bb17-671d725ae36e","Type":"ContainerStarted","Data":"a09d3f129dc999b66b9bc6883e97a917b085c7fb221ba8afd4ab5280f5aa0caf"} Dec 03 12:08:14 crc kubenswrapper[4591]: I1203 12:08:14.770498 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.770469662 podStartE2EDuration="1.770469662s" podCreationTimestamp="2025-12-03 12:08:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:08:14.766512581 +0000 UTC m=+192.193552351" watchObservedRunningTime="2025-12-03 12:08:14.770469662 +0000 UTC m=+192.197509431" Dec 03 12:08:15 crc kubenswrapper[4591]: I1203 12:08:15.761463 4591 generic.go:334] "Generic (PLEG): container finished" podID="8cc20ef8-69e7-466e-bb17-671d725ae36e" containerID="6a1084ba9d7e70ba83ae87c40d4dd95f64eaf5d1dcf2387f91662cdb12b43f47" exitCode=0 Dec 03 12:08:15 crc kubenswrapper[4591]: I1203 12:08:15.761534 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8cc20ef8-69e7-466e-bb17-671d725ae36e","Type":"ContainerDied","Data":"6a1084ba9d7e70ba83ae87c40d4dd95f64eaf5d1dcf2387f91662cdb12b43f47"} Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.064564 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.122832 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8cc20ef8-69e7-466e-bb17-671d725ae36e-kubelet-dir\") pod \"8cc20ef8-69e7-466e-bb17-671d725ae36e\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.122945 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8cc20ef8-69e7-466e-bb17-671d725ae36e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8cc20ef8-69e7-466e-bb17-671d725ae36e" (UID: "8cc20ef8-69e7-466e-bb17-671d725ae36e"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.122982 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8cc20ef8-69e7-466e-bb17-671d725ae36e-kube-api-access\") pod \"8cc20ef8-69e7-466e-bb17-671d725ae36e\" (UID: \"8cc20ef8-69e7-466e-bb17-671d725ae36e\") " Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.123291 4591 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8cc20ef8-69e7-466e-bb17-671d725ae36e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.128159 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cc20ef8-69e7-466e-bb17-671d725ae36e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8cc20ef8-69e7-466e-bb17-671d725ae36e" (UID: "8cc20ef8-69e7-466e-bb17-671d725ae36e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.224827 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8cc20ef8-69e7-466e-bb17-671d725ae36e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.779195 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8cc20ef8-69e7-466e-bb17-671d725ae36e","Type":"ContainerDied","Data":"a09d3f129dc999b66b9bc6883e97a917b085c7fb221ba8afd4ab5280f5aa0caf"} Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.779257 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a09d3f129dc999b66b9bc6883e97a917b085c7fb221ba8afd4ab5280f5aa0caf" Dec 03 12:08:17 crc kubenswrapper[4591]: I1203 12:08:17.779260 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.217116 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 12:08:19 crc kubenswrapper[4591]: E1203 12:08:19.217405 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc20ef8-69e7-466e-bb17-671d725ae36e" containerName="pruner" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.217420 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc20ef8-69e7-466e-bb17-671d725ae36e" containerName="pruner" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.217548 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cc20ef8-69e7-466e-bb17-671d725ae36e" containerName="pruner" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.218007 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.221656 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.221887 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.227368 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.353558 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-var-lock\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.353649 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.353700 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kube-api-access\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.454172 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kube-api-access\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.454209 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-var-lock\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.454267 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.454366 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.454482 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-var-lock\") pod \"installer-9-crc\" (UID: 
\"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.468236 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kube-api-access\") pod \"installer-9-crc\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.532474 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:19 crc kubenswrapper[4591]: I1203 12:08:19.920552 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 12:08:19 crc kubenswrapper[4591]: W1203 12:08:19.927500 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod8c1e47a4_b770_4931_8ae0_f1e70ed1b680.slice/crio-184fb0d49c3f8e93b4e63a673d301e5eb976819c60d89a1c4bba460e897df81e WatchSource:0}: Error finding container 184fb0d49c3f8e93b4e63a673d301e5eb976819c60d89a1c4bba460e897df81e: Status 404 returned error can't find the container with id 184fb0d49c3f8e93b4e63a673d301e5eb976819c60d89a1c4bba460e897df81e Dec 03 12:08:20 crc kubenswrapper[4591]: I1203 12:08:20.803488 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c1e47a4-b770-4931-8ae0-f1e70ed1b680","Type":"ContainerStarted","Data":"7cc588c9c07146700620c13756329941a167026f0ec2bde94673e0cbfdd723d4"} Dec 03 12:08:20 crc kubenswrapper[4591]: I1203 12:08:20.803987 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c1e47a4-b770-4931-8ae0-f1e70ed1b680","Type":"ContainerStarted","Data":"184fb0d49c3f8e93b4e63a673d301e5eb976819c60d89a1c4bba460e897df81e"} Dec 03 12:08:20 crc kubenswrapper[4591]: I1203 12:08:20.820763 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.820739503 podStartE2EDuration="1.820739503s" podCreationTimestamp="2025-12-03 12:08:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:08:20.817335226 +0000 UTC m=+198.244374996" watchObservedRunningTime="2025-12-03 12:08:20.820739503 +0000 UTC m=+198.247779273" Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.299905 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.301232 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.301340 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.302193 4591 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.302267 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf" gracePeriod=600 Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.832998 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf" exitCode=0 Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.833091 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf"} Dec 03 12:08:25 crc kubenswrapper[4591]: I1203 12:08:25.833567 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"5b48d026487ee4ed01e0e2bfbf7bb4dc4d646df3eb7c48ee2b7d086035594359"} Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.151527 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" podUID="a76a4f2a-891a-4931-b1a6-49208d5b2c01" containerName="oauth-openshift" containerID="cri-o://0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03" gracePeriod=15 Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.490878 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.515549 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-58444664d6-nhbtp"] Dec 03 12:08:31 crc kubenswrapper[4591]: E1203 12:08:31.515787 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a76a4f2a-891a-4931-b1a6-49208d5b2c01" containerName="oauth-openshift" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.515806 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="a76a4f2a-891a-4931-b1a6-49208d5b2c01" containerName="oauth-openshift" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.515915 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="a76a4f2a-891a-4931-b1a6-49208d5b2c01" containerName="oauth-openshift" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.516359 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.527214 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58444664d6-nhbtp"] Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597270 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-provider-selection\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597311 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-session\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597330 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-router-certs\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597365 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-trusted-ca-bundle\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597387 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-error\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597430 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-cliconfig\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597479 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-service-ca\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597502 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhkl2\" (UniqueName: \"kubernetes.io/projected/a76a4f2a-891a-4931-b1a6-49208d5b2c01-kube-api-access-zhkl2\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597549 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-policies\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597573 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-ocp-branding-template\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.597603 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-idp-0-file-data\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598336 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-serving-cert\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598366 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-dir\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598236 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598398 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-login\") pod \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\" (UID: \"a76a4f2a-891a-4931-b1a6-49208d5b2c01\") " Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598525 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c543bd14-6b10-45c3-8149-8118d3c6847a-audit-dir\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598553 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-service-ca\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598581 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598599 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598620 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598638 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbm5h\" (UniqueName: \"kubernetes.io/projected/c543bd14-6b10-45c3-8149-8118d3c6847a-kube-api-access-bbm5h\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598662 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-session\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: 
\"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598685 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-error\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598703 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598720 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-login\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598734 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-audit-policies\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598750 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598770 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598773 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598794 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-router-certs\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598769 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.598837 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.599144 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.599172 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.599185 4591 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.599198 4591 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a76a4f2a-891a-4931-b1a6-49208d5b2c01-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.599201 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.603484 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.603521 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a76a4f2a-891a-4931-b1a6-49208d5b2c01-kube-api-access-zhkl2" (OuterVolumeSpecName: "kube-api-access-zhkl2") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "kube-api-access-zhkl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.603746 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.603986 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.604341 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.604466 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.604627 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.604747 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.604988 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "a76a4f2a-891a-4931-b1a6-49208d5b2c01" (UID: "a76a4f2a-891a-4931-b1a6-49208d5b2c01"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.700566 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-session\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.700664 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-error\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.700691 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.700715 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-login\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.700733 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-audit-policies\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.700774 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701384 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: 
\"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701460 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-router-certs\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701575 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c543bd14-6b10-45c3-8149-8118d3c6847a-audit-dir\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701624 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-service-ca\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701743 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701769 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701831 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701865 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbm5h\" (UniqueName: \"kubernetes.io/projected/c543bd14-6b10-45c3-8149-8118d3c6847a-kube-api-access-bbm5h\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.701944 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 
12:08:31.701975 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702001 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702086 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702101 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702112 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702122 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702160 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702176 4591 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a76a4f2a-891a-4931-b1a6-49208d5b2c01-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702194 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhkl2\" (UniqueName: \"kubernetes.io/projected/a76a4f2a-891a-4931-b1a6-49208d5b2c01-kube-api-access-zhkl2\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.702346 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c543bd14-6b10-45c3-8149-8118d3c6847a-audit-dir\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.703154 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-service-ca\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.703203 4591 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-audit-policies\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.703568 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-error\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.704247 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.704251 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-login\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.704400 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.704804 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.704924 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-session\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.705894 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.706178 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-router-certs\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.706370 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.706658 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c543bd14-6b10-45c3-8149-8118d3c6847a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.716310 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbm5h\" (UniqueName: \"kubernetes.io/projected/c543bd14-6b10-45c3-8149-8118d3c6847a-kube-api-access-bbm5h\") pod \"oauth-openshift-58444664d6-nhbtp\" (UID: \"c543bd14-6b10-45c3-8149-8118d3c6847a\") " pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.826950 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.867739 4591 generic.go:334] "Generic (PLEG): container finished" podID="a76a4f2a-891a-4931-b1a6-49208d5b2c01" containerID="0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03" exitCode=0 Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.867820 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.867836 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" event={"ID":"a76a4f2a-891a-4931-b1a6-49208d5b2c01","Type":"ContainerDied","Data":"0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03"} Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.868103 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-tr2zc" event={"ID":"a76a4f2a-891a-4931-b1a6-49208d5b2c01","Type":"ContainerDied","Data":"1c589abb94ff7c66d9ea476662e00cfd778335397f8383cdabc1d99377b47ba6"} Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.868130 4591 scope.go:117] "RemoveContainer" containerID="0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.887088 4591 scope.go:117] "RemoveContainer" containerID="0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03" Dec 03 12:08:31 crc kubenswrapper[4591]: E1203 12:08:31.891903 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03\": container with ID starting with 0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03 not found: ID does not exist" containerID="0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.891948 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03"} err="failed to get container status \"0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03\": rpc error: code = NotFound desc = could not find container \"0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03\": container with ID starting with 0e99865d8707fc149fd601f496c589f4cb3a5b5e2c88bfed9d2de117b9e9ca03 not found: ID does not exist" Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.891982 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tr2zc"] Dec 03 12:08:31 crc kubenswrapper[4591]: I1203 12:08:31.894960 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-tr2zc"] Dec 03 12:08:32 crc kubenswrapper[4591]: I1203 12:08:32.185089 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58444664d6-nhbtp"] Dec 03 12:08:32 crc kubenswrapper[4591]: I1203 12:08:32.874528 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" event={"ID":"c543bd14-6b10-45c3-8149-8118d3c6847a","Type":"ContainerStarted","Data":"e87cb85a2e6c41a41b287d81847ed52b6a48715fed8d55e3fda1554cfb632b6a"} Dec 03 12:08:32 crc kubenswrapper[4591]: I1203 12:08:32.874813 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" event={"ID":"c543bd14-6b10-45c3-8149-8118d3c6847a","Type":"ContainerStarted","Data":"a1d5ffb5fbf0847e982d1f263be59be700fd95f810a9bd18b611dbfaca4c3b0f"} Dec 03 12:08:32 crc kubenswrapper[4591]: I1203 12:08:32.874830 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:32 crc kubenswrapper[4591]: I1203 12:08:32.879006 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" Dec 03 12:08:32 crc kubenswrapper[4591]: I1203 12:08:32.890451 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-58444664d6-nhbtp" podStartSLOduration=26.890433656 podStartE2EDuration="26.890433656s" podCreationTimestamp="2025-12-03 12:08:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:08:32.889134473 +0000 UTC m=+210.316174243" watchObservedRunningTime="2025-12-03 12:08:32.890433656 +0000 UTC m=+210.317473426" Dec 03 12:08:32 crc kubenswrapper[4591]: I1203 12:08:32.895180 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a76a4f2a-891a-4931-b1a6-49208d5b2c01" path="/var/lib/kubelet/pods/a76a4f2a-891a-4931-b1a6-49208d5b2c01/volumes" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.092641 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm725"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.093681 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mm725" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="registry-server" containerID="cri-o://b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d" gracePeriod=30 Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.100356 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xzn8f"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.100534 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xzn8f" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="registry-server" containerID="cri-o://2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66" gracePeriod=30 Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.120846 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-849sq"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.121156 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" podUID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" containerName="marketplace-operator" containerID="cri-o://e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf" gracePeriod=30 Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.141401 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kjc6n"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.141683 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kjc6n" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="registry-server" containerID="cri-o://18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d" gracePeriod=30 Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.147646 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lrx9r"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.150121 
4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.157455 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p2zbm"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.157748 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p2zbm" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="registry-server" containerID="cri-o://52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342" gracePeriod=30 Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.159689 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lrx9r"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.222200 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kkjg\" (UniqueName: \"kubernetes.io/projected/36715c77-dc19-46de-b452-6f43fef4b296-kube-api-access-7kkjg\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.222628 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/36715c77-dc19-46de-b452-6f43fef4b296-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.222670 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36715c77-dc19-46de-b452-6f43fef4b296-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.326124 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kkjg\" (UniqueName: \"kubernetes.io/projected/36715c77-dc19-46de-b452-6f43fef4b296-kube-api-access-7kkjg\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.326281 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/36715c77-dc19-46de-b452-6f43fef4b296-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.326316 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36715c77-dc19-46de-b452-6f43fef4b296-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc 
kubenswrapper[4591]: I1203 12:08:48.330453 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36715c77-dc19-46de-b452-6f43fef4b296-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.331839 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/36715c77-dc19-46de-b452-6f43fef4b296-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.342444 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kkjg\" (UniqueName: \"kubernetes.io/projected/36715c77-dc19-46de-b452-6f43fef4b296-kube-api-access-7kkjg\") pod \"marketplace-operator-79b997595-lrx9r\" (UID: \"36715c77-dc19-46de-b452-6f43fef4b296\") " pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.497821 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.504308 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.530578 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.533654 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.537502 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.541485 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.632531 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r72q\" (UniqueName: \"kubernetes.io/projected/83de920c-74b3-4ab0-bdbc-71c95d354fc7-kube-api-access-4r72q\") pod \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.632834 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-catalog-content\") pod \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.632859 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-operator-metrics\") pod \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.632886 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr7gr\" (UniqueName: \"kubernetes.io/projected/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-kube-api-access-kr7gr\") pod \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.632920 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-utilities\") pod \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.632959 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-catalog-content\") pod \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.632981 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fngb2\" (UniqueName: \"kubernetes.io/projected/7b035511-6f69-41ee-b874-77ae32b9a25b-kube-api-access-fngb2\") pod \"7b035511-6f69-41ee-b874-77ae32b9a25b\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.633099 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-utilities\") pod \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.633621 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-utilities" (OuterVolumeSpecName: "utilities") pod "83de920c-74b3-4ab0-bdbc-71c95d354fc7" (UID: "83de920c-74b3-4ab0-bdbc-71c95d354fc7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.633778 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-utilities" (OuterVolumeSpecName: "utilities") pod "7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" (UID: "7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.635662 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdmdw\" (UniqueName: \"kubernetes.io/projected/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-kube-api-access-pdmdw\") pod \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\" (UID: \"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.635701 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49jwd\" (UniqueName: \"kubernetes.io/projected/9f8ca899-dbb0-4e48-b783-3e4e8337505f-kube-api-access-49jwd\") pod \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.635724 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-utilities\") pod \"7b035511-6f69-41ee-b874-77ae32b9a25b\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.635775 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-trusted-ca\") pod \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\" (UID: \"9f8ca899-dbb0-4e48-b783-3e4e8337505f\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.635798 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-utilities\") pod \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\" (UID: \"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.635818 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-catalog-content\") pod \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\" (UID: \"83de920c-74b3-4ab0-bdbc-71c95d354fc7\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.635834 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-catalog-content\") pod \"7b035511-6f69-41ee-b874-77ae32b9a25b\" (UID: \"7b035511-6f69-41ee-b874-77ae32b9a25b\") " Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.636271 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.636286 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 
12:08:48.638343 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b035511-6f69-41ee-b874-77ae32b9a25b-kube-api-access-fngb2" (OuterVolumeSpecName: "kube-api-access-fngb2") pod "7b035511-6f69-41ee-b874-77ae32b9a25b" (UID: "7b035511-6f69-41ee-b874-77ae32b9a25b"). InnerVolumeSpecName "kube-api-access-fngb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.638734 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-kube-api-access-kr7gr" (OuterVolumeSpecName: "kube-api-access-kr7gr") pod "f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" (UID: "f1776f21-2e73-4c7e-aff0-d7be9e65eb1d"). InnerVolumeSpecName "kube-api-access-kr7gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.639054 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9f8ca899-dbb0-4e48-b783-3e4e8337505f" (UID: "9f8ca899-dbb0-4e48-b783-3e4e8337505f"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.639206 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83de920c-74b3-4ab0-bdbc-71c95d354fc7-kube-api-access-4r72q" (OuterVolumeSpecName: "kube-api-access-4r72q") pod "83de920c-74b3-4ab0-bdbc-71c95d354fc7" (UID: "83de920c-74b3-4ab0-bdbc-71c95d354fc7"). InnerVolumeSpecName "kube-api-access-4r72q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.639322 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-utilities" (OuterVolumeSpecName: "utilities") pod "7b035511-6f69-41ee-b874-77ae32b9a25b" (UID: "7b035511-6f69-41ee-b874-77ae32b9a25b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.639538 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9f8ca899-dbb0-4e48-b783-3e4e8337505f" (UID: "9f8ca899-dbb0-4e48-b783-3e4e8337505f"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.639564 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-kube-api-access-pdmdw" (OuterVolumeSpecName: "kube-api-access-pdmdw") pod "7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" (UID: "7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0"). InnerVolumeSpecName "kube-api-access-pdmdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.639580 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-utilities" (OuterVolumeSpecName: "utilities") pod "f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" (UID: "f1776f21-2e73-4c7e-aff0-d7be9e65eb1d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.648634 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f8ca899-dbb0-4e48-b783-3e4e8337505f-kube-api-access-49jwd" (OuterVolumeSpecName: "kube-api-access-49jwd") pod "9f8ca899-dbb0-4e48-b783-3e4e8337505f" (UID: "9f8ca899-dbb0-4e48-b783-3e4e8337505f"). InnerVolumeSpecName "kube-api-access-49jwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.667182 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b035511-6f69-41ee-b874-77ae32b9a25b" (UID: "7b035511-6f69-41ee-b874-77ae32b9a25b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.704014 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" (UID: "f1776f21-2e73-4c7e-aff0-d7be9e65eb1d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.710950 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83de920c-74b3-4ab0-bdbc-71c95d354fc7" (UID: "83de920c-74b3-4ab0-bdbc-71c95d354fc7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737301 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737329 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fngb2\" (UniqueName: \"kubernetes.io/projected/7b035511-6f69-41ee-b874-77ae32b9a25b-kube-api-access-fngb2\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737344 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdmdw\" (UniqueName: \"kubernetes.io/projected/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-kube-api-access-pdmdw\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737354 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49jwd\" (UniqueName: \"kubernetes.io/projected/9f8ca899-dbb0-4e48-b783-3e4e8337505f-kube-api-access-49jwd\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737364 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737373 4591 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737381 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737388 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83de920c-74b3-4ab0-bdbc-71c95d354fc7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737396 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b035511-6f69-41ee-b874-77ae32b9a25b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737407 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r72q\" (UniqueName: \"kubernetes.io/projected/83de920c-74b3-4ab0-bdbc-71c95d354fc7-kube-api-access-4r72q\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737415 4591 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9f8ca899-dbb0-4e48-b783-3e4e8337505f-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.737424 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr7gr\" (UniqueName: \"kubernetes.io/projected/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d-kube-api-access-kr7gr\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.740297 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" (UID: "7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.839454 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.904710 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-lrx9r"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.975082 4591 generic.go:334] "Generic (PLEG): container finished" podID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerID="52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342" exitCode=0 Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.975161 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p2zbm" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.975185 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p2zbm" event={"ID":"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0","Type":"ContainerDied","Data":"52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342"} Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.975257 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p2zbm" event={"ID":"7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0","Type":"ContainerDied","Data":"52fda760a33b2a8a210ae12837b9d15e22f49064203361f4434a2dce69b0df3c"} Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.975283 4591 scope.go:117] "RemoveContainer" containerID="52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.985587 4591 generic.go:334] "Generic (PLEG): container finished" podID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerID="b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d" exitCode=0 Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.985719 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm725" event={"ID":"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d","Type":"ContainerDied","Data":"b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d"} Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.985793 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm725" event={"ID":"f1776f21-2e73-4c7e-aff0-d7be9e65eb1d","Type":"ContainerDied","Data":"28489d4487009cb6577e2dd090cd1aa27ffe67e26c2102a787c1d89251976092"} Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.985938 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mm725" Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.996115 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p2zbm"] Dec 03 12:08:48 crc kubenswrapper[4591]: I1203 12:08:48.999904 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p2zbm"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.001014 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.001111 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" event={"ID":"9f8ca899-dbb0-4e48-b783-3e4e8337505f","Type":"ContainerDied","Data":"e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf"} Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.001347 4591 generic.go:334] "Generic (PLEG): container finished" podID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" containerID="e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf" exitCode=0 Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.001517 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-849sq" event={"ID":"9f8ca899-dbb0-4e48-b783-3e4e8337505f","Type":"ContainerDied","Data":"e5611a145aae43ce6f83c3b930580d2daa18b3a0162a4d1cd5ee17744da462ef"} Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.008632 4591 generic.go:334] "Generic (PLEG): container finished" podID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerID="18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d" exitCode=0 Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.008730 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kjc6n" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.009006 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kjc6n" event={"ID":"7b035511-6f69-41ee-b874-77ae32b9a25b","Type":"ContainerDied","Data":"18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d"} Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.009082 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kjc6n" event={"ID":"7b035511-6f69-41ee-b874-77ae32b9a25b","Type":"ContainerDied","Data":"c58fd8e3bc2718011f28cc3d55e63e43e13b9a93c0964768958f73513146a030"} Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.010769 4591 scope.go:117] "RemoveContainer" containerID="772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.013317 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm725"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.015451 4591 generic.go:334] "Generic (PLEG): container finished" podID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerID="2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66" exitCode=0 Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.015610 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzn8f" event={"ID":"83de920c-74b3-4ab0-bdbc-71c95d354fc7","Type":"ContainerDied","Data":"2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66"} Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.015662 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xzn8f" event={"ID":"83de920c-74b3-4ab0-bdbc-71c95d354fc7","Type":"ContainerDied","Data":"1e53d257185fff0c8d85c6594efdb1844e5bdb46a92b437507f350d28f3004b7"} Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.015961 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xzn8f" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.016597 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mm725"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.019039 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" event={"ID":"36715c77-dc19-46de-b452-6f43fef4b296","Type":"ContainerStarted","Data":"d639cb81c3aa8945f6cf6f87acfd3d2dca3dcb51a1a087a9ba97527416a43424"} Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.033616 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kjc6n"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.037622 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kjc6n"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.041724 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-849sq"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.045508 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-849sq"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.046419 4591 scope.go:117] "RemoveContainer" containerID="70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.051580 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xzn8f"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.053321 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xzn8f"] Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.060539 4591 scope.go:117] "RemoveContainer" containerID="52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.061035 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342\": container with ID starting with 52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342 not found: ID does not exist" containerID="52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.061088 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342"} err="failed to get container status \"52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342\": rpc error: code = NotFound desc = could not find container \"52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342\": container with ID starting with 52036f2b1ba195ba5ee9bd42dd583830b6aed2eed26399cf105453c60dd28342 not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.061149 4591 scope.go:117] "RemoveContainer" containerID="772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.061475 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3\": container with ID starting with 
772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3 not found: ID does not exist" containerID="772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.061522 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3"} err="failed to get container status \"772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3\": rpc error: code = NotFound desc = could not find container \"772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3\": container with ID starting with 772a78a1e68d3526a14447a721bd17d1c3d5be20531509d58278914bae2af5a3 not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.061545 4591 scope.go:117] "RemoveContainer" containerID="70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.062035 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb\": container with ID starting with 70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb not found: ID does not exist" containerID="70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.062086 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb"} err="failed to get container status \"70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb\": rpc error: code = NotFound desc = could not find container \"70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb\": container with ID starting with 70a9cb9f00b12265ad86ea15a354becd7516761880c21a3a8c0745b894f190cb not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.062102 4591 scope.go:117] "RemoveContainer" containerID="b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.073144 4591 scope.go:117] "RemoveContainer" containerID="1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.085126 4591 scope.go:117] "RemoveContainer" containerID="9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.097726 4591 scope.go:117] "RemoveContainer" containerID="b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.098049 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d\": container with ID starting with b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d not found: ID does not exist" containerID="b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.098087 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d"} err="failed to get container status \"b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d\": rpc error: code = NotFound desc 
= could not find container \"b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d\": container with ID starting with b7d64b919c6a3e8cac804607c9bb2bc9d9971414749a13cfdf10bb80e60d784d not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.098119 4591 scope.go:117] "RemoveContainer" containerID="1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.098478 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd\": container with ID starting with 1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd not found: ID does not exist" containerID="1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.098512 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd"} err="failed to get container status \"1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd\": rpc error: code = NotFound desc = could not find container \"1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd\": container with ID starting with 1afbbfdfc3adb1793cdd2c0d93e17e2d66dd156ba198219ae97e3d4033217efd not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.098538 4591 scope.go:117] "RemoveContainer" containerID="9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.098825 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a\": container with ID starting with 9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a not found: ID does not exist" containerID="9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.098849 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a"} err="failed to get container status \"9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a\": rpc error: code = NotFound desc = could not find container \"9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a\": container with ID starting with 9cc008edf1bd09aab7af2366ee6e6240e27a5093d71538f4dd9bf0ba4f35277a not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.098893 4591 scope.go:117] "RemoveContainer" containerID="e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.109696 4591 scope.go:117] "RemoveContainer" containerID="e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.110007 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf\": container with ID starting with e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf not found: ID does not exist" containerID="e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf" Dec 03 12:08:49 crc 
kubenswrapper[4591]: I1203 12:08:49.110039 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf"} err="failed to get container status \"e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf\": rpc error: code = NotFound desc = could not find container \"e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf\": container with ID starting with e0e1255818b0949f72a2e5d63590c0f44ccd1c5915074be74b0c214418134cbf not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.110057 4591 scope.go:117] "RemoveContainer" containerID="18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.121733 4591 scope.go:117] "RemoveContainer" containerID="4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.138041 4591 scope.go:117] "RemoveContainer" containerID="907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.149052 4591 scope.go:117] "RemoveContainer" containerID="18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.149456 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d\": container with ID starting with 18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d not found: ID does not exist" containerID="18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.149486 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d"} err="failed to get container status \"18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d\": rpc error: code = NotFound desc = could not find container \"18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d\": container with ID starting with 18b9504fd727433f0d34ceccd9e03418bd661a1ecc6982f016f9f0fda55dc92d not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.149504 4591 scope.go:117] "RemoveContainer" containerID="4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.149774 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd\": container with ID starting with 4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd not found: ID does not exist" containerID="4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.149806 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd"} err="failed to get container status \"4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd\": rpc error: code = NotFound desc = could not find container \"4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd\": container with ID starting with 4d14f4644fd201f6be97f471ace4448d497494913ff3886b1f18e2933e48c6bd 
not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.149831 4591 scope.go:117] "RemoveContainer" containerID="907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.150104 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b\": container with ID starting with 907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b not found: ID does not exist" containerID="907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.150151 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b"} err="failed to get container status \"907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b\": rpc error: code = NotFound desc = could not find container \"907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b\": container with ID starting with 907b68a38edd70997a8432ff28c8b998a755e07a7898d68920bbca8ce175ae0b not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.150177 4591 scope.go:117] "RemoveContainer" containerID="2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.160924 4591 scope.go:117] "RemoveContainer" containerID="46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.172416 4591 scope.go:117] "RemoveContainer" containerID="a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.184676 4591 scope.go:117] "RemoveContainer" containerID="2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.185008 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66\": container with ID starting with 2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66 not found: ID does not exist" containerID="2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.185046 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66"} err="failed to get container status \"2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66\": rpc error: code = NotFound desc = could not find container \"2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66\": container with ID starting with 2d53f15485f658f23bc51bfc18fe3a33e728cbe5b9cda39b723b9057bd578b66 not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.185077 4591 scope.go:117] "RemoveContainer" containerID="46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.185410 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d\": container with ID starting with 
46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d not found: ID does not exist" containerID="46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.185456 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d"} err="failed to get container status \"46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d\": rpc error: code = NotFound desc = could not find container \"46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d\": container with ID starting with 46f97ea18f1ce51a52c716fe2aea82d8e0ce476b4597fe75e6039813643e1f4d not found: ID does not exist" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.185472 4591 scope.go:117] "RemoveContainer" containerID="a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9" Dec 03 12:08:49 crc kubenswrapper[4591]: E1203 12:08:49.185686 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9\": container with ID starting with a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9 not found: ID does not exist" containerID="a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9" Dec 03 12:08:49 crc kubenswrapper[4591]: I1203 12:08:49.185728 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9"} err="failed to get container status \"a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9\": rpc error: code = NotFound desc = could not find container \"a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9\": container with ID starting with a98d68627aa30a485badb1cc4b4c9ac7f38571691bc1a63157b8aeaf85475eb9 not found: ID does not exist" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.027140 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" event={"ID":"36715c77-dc19-46de-b452-6f43fef4b296","Type":"ContainerStarted","Data":"33603e339f1ee4a0e0f3d0024c4548fd95b978cc09a0b9017604407381dfa9bf"} Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.027674 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.031169 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.042971 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-lrx9r" podStartSLOduration=2.042953461 podStartE2EDuration="2.042953461s" podCreationTimestamp="2025-12-03 12:08:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:08:50.041377373 +0000 UTC m=+227.468417144" watchObservedRunningTime="2025-12-03 12:08:50.042953461 +0000 UTC m=+227.469993230" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311254 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r27xd"] Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 
12:08:50.311568 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311589 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311604 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311611 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311625 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311633 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311642 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="extract-content" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311648 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="extract-content" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311658 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" containerName="marketplace-operator" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311663 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" containerName="marketplace-operator" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311672 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311678 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311685 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311691 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311699 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311705 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311714 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="extract-content" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311720 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="extract-content" Dec 03 
12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311729 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311735 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="extract-utilities" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311746 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="extract-content" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311752 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="extract-content" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311771 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="extract-content" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311776 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="extract-content" Dec 03 12:08:50 crc kubenswrapper[4591]: E1203 12:08:50.311782 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311787 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311914 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311925 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311934 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311945 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" containerName="marketplace-operator" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.311953 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" containerName="registry-server" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.312895 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.315847 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.325454 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r27xd"] Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.463181 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4b9cef-ccab-403d-9dad-5d04a216bc01-catalog-content\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.463345 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4b9cef-ccab-403d-9dad-5d04a216bc01-utilities\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.463457 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65s2t\" (UniqueName: \"kubernetes.io/projected/3b4b9cef-ccab-403d-9dad-5d04a216bc01-kube-api-access-65s2t\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.515521 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q2fg9"] Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.517892 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.520919 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.524246 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q2fg9"] Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.564823 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4b9cef-ccab-403d-9dad-5d04a216bc01-catalog-content\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.564912 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4b9cef-ccab-403d-9dad-5d04a216bc01-utilities\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.564952 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65s2t\" (UniqueName: \"kubernetes.io/projected/3b4b9cef-ccab-403d-9dad-5d04a216bc01-kube-api-access-65s2t\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.565387 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4b9cef-ccab-403d-9dad-5d04a216bc01-catalog-content\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.565545 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4b9cef-ccab-403d-9dad-5d04a216bc01-utilities\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.583298 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65s2t\" (UniqueName: \"kubernetes.io/projected/3b4b9cef-ccab-403d-9dad-5d04a216bc01-kube-api-access-65s2t\") pod \"redhat-marketplace-r27xd\" (UID: \"3b4b9cef-ccab-403d-9dad-5d04a216bc01\") " pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.633212 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.665811 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afa3d606-fc50-40b2-9b33-1fcc258faf3d-catalog-content\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.666151 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd6bl\" (UniqueName: \"kubernetes.io/projected/afa3d606-fc50-40b2-9b33-1fcc258faf3d-kube-api-access-vd6bl\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.666201 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afa3d606-fc50-40b2-9b33-1fcc258faf3d-utilities\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.767100 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd6bl\" (UniqueName: \"kubernetes.io/projected/afa3d606-fc50-40b2-9b33-1fcc258faf3d-kube-api-access-vd6bl\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.767172 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afa3d606-fc50-40b2-9b33-1fcc258faf3d-utilities\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.767272 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afa3d606-fc50-40b2-9b33-1fcc258faf3d-catalog-content\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.767712 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afa3d606-fc50-40b2-9b33-1fcc258faf3d-catalog-content\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.768345 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afa3d606-fc50-40b2-9b33-1fcc258faf3d-utilities\") pod \"redhat-operators-q2fg9\" (UID: \"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.784192 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd6bl\" (UniqueName: \"kubernetes.io/projected/afa3d606-fc50-40b2-9b33-1fcc258faf3d-kube-api-access-vd6bl\") pod \"redhat-operators-q2fg9\" (UID: 
\"afa3d606-fc50-40b2-9b33-1fcc258faf3d\") " pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.832056 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.897027 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b035511-6f69-41ee-b874-77ae32b9a25b" path="/var/lib/kubelet/pods/7b035511-6f69-41ee-b874-77ae32b9a25b/volumes" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.897825 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0" path="/var/lib/kubelet/pods/7cff9cfc-1bad-4d45-a4ca-97c08dbcd7d0/volumes" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.898415 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83de920c-74b3-4ab0-bdbc-71c95d354fc7" path="/var/lib/kubelet/pods/83de920c-74b3-4ab0-bdbc-71c95d354fc7/volumes" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.899511 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f8ca899-dbb0-4e48-b783-3e4e8337505f" path="/var/lib/kubelet/pods/9f8ca899-dbb0-4e48-b783-3e4e8337505f/volumes" Dec 03 12:08:50 crc kubenswrapper[4591]: I1203 12:08:50.899964 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1776f21-2e73-4c7e-aff0-d7be9e65eb1d" path="/var/lib/kubelet/pods/f1776f21-2e73-4c7e-aff0-d7be9e65eb1d/volumes" Dec 03 12:08:51 crc kubenswrapper[4591]: I1203 12:08:51.013132 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r27xd"] Dec 03 12:08:51 crc kubenswrapper[4591]: W1203 12:08:51.013313 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b4b9cef_ccab_403d_9dad_5d04a216bc01.slice/crio-c3e7f2f5a0b55fe411c1d6c158a7b3dbc0d354e14a32e6b6a0718ade65e3cf32 WatchSource:0}: Error finding container c3e7f2f5a0b55fe411c1d6c158a7b3dbc0d354e14a32e6b6a0718ade65e3cf32: Status 404 returned error can't find the container with id c3e7f2f5a0b55fe411c1d6c158a7b3dbc0d354e14a32e6b6a0718ade65e3cf32 Dec 03 12:08:51 crc kubenswrapper[4591]: I1203 12:08:51.034451 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r27xd" event={"ID":"3b4b9cef-ccab-403d-9dad-5d04a216bc01","Type":"ContainerStarted","Data":"c3e7f2f5a0b55fe411c1d6c158a7b3dbc0d354e14a32e6b6a0718ade65e3cf32"} Dec 03 12:08:51 crc kubenswrapper[4591]: I1203 12:08:51.181856 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q2fg9"] Dec 03 12:08:51 crc kubenswrapper[4591]: W1203 12:08:51.210736 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafa3d606_fc50_40b2_9b33_1fcc258faf3d.slice/crio-e0371dbbd503ff0765bf07a231478690e1c04316aace9f0ca6405e8c9df5078a WatchSource:0}: Error finding container e0371dbbd503ff0765bf07a231478690e1c04316aace9f0ca6405e8c9df5078a: Status 404 returned error can't find the container with id e0371dbbd503ff0765bf07a231478690e1c04316aace9f0ca6405e8c9df5078a Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.040639 4591 generic.go:334] "Generic (PLEG): container finished" podID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" containerID="228af9e5734feafccd1dcbee19fd2d93a3ce1d12b26cee96d17b76125a25849b" exitCode=0 Dec 03 12:08:52 crc 
kubenswrapper[4591]: I1203 12:08:52.040758 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q2fg9" event={"ID":"afa3d606-fc50-40b2-9b33-1fcc258faf3d","Type":"ContainerDied","Data":"228af9e5734feafccd1dcbee19fd2d93a3ce1d12b26cee96d17b76125a25849b"} Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.040969 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q2fg9" event={"ID":"afa3d606-fc50-40b2-9b33-1fcc258faf3d","Type":"ContainerStarted","Data":"e0371dbbd503ff0765bf07a231478690e1c04316aace9f0ca6405e8c9df5078a"} Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.043691 4591 generic.go:334] "Generic (PLEG): container finished" podID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" containerID="a97e338d9653004c506ed8fa9bae57cd37f38f35035d2524f1da3c16881951f5" exitCode=0 Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.043730 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r27xd" event={"ID":"3b4b9cef-ccab-403d-9dad-5d04a216bc01","Type":"ContainerDied","Data":"a97e338d9653004c506ed8fa9bae57cd37f38f35035d2524f1da3c16881951f5"} Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.712760 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pkxmh"] Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.713972 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.717014 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pkxmh"] Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.718629 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.794623 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/495b88b0-ab9f-45d4-b257-da87febda2bb-catalog-content\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.794720 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/495b88b0-ab9f-45d4-b257-da87febda2bb-utilities\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.794754 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4rfx\" (UniqueName: \"kubernetes.io/projected/495b88b0-ab9f-45d4-b257-da87febda2bb-kube-api-access-m4rfx\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.895803 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/495b88b0-ab9f-45d4-b257-da87febda2bb-catalog-content\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " 
pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.896113 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/495b88b0-ab9f-45d4-b257-da87febda2bb-utilities\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.896139 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4rfx\" (UniqueName: \"kubernetes.io/projected/495b88b0-ab9f-45d4-b257-da87febda2bb-kube-api-access-m4rfx\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.896479 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/495b88b0-ab9f-45d4-b257-da87febda2bb-utilities\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.896490 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/495b88b0-ab9f-45d4-b257-da87febda2bb-catalog-content\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.907712 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tnlm8"] Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.908761 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.913804 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.915083 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4rfx\" (UniqueName: \"kubernetes.io/projected/495b88b0-ab9f-45d4-b257-da87febda2bb-kube-api-access-m4rfx\") pod \"community-operators-pkxmh\" (UID: \"495b88b0-ab9f-45d4-b257-da87febda2bb\") " pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.917718 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tnlm8"] Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.996879 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdwzk\" (UniqueName: \"kubernetes.io/projected/10dbb454-fa06-48e9-b129-d0b68864515f-kube-api-access-pdwzk\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.997037 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10dbb454-fa06-48e9-b129-d0b68864515f-utilities\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:52 crc kubenswrapper[4591]: I1203 12:08:52.997111 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10dbb454-fa06-48e9-b129-d0b68864515f-catalog-content\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.049817 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q2fg9" event={"ID":"afa3d606-fc50-40b2-9b33-1fcc258faf3d","Type":"ContainerStarted","Data":"f0b3677944e604d48a45735cbfdf994f6455ecca5e32b75deff994bc7a9eb64c"} Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.052169 4591 generic.go:334] "Generic (PLEG): container finished" podID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" containerID="3487ee5cc5585651ed0103bcbae5cec82362dce86f30132c16967c575c4d420b" exitCode=0 Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.052391 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.052669 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r27xd" event={"ID":"3b4b9cef-ccab-403d-9dad-5d04a216bc01","Type":"ContainerDied","Data":"3487ee5cc5585651ed0103bcbae5cec82362dce86f30132c16967c575c4d420b"} Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.098086 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10dbb454-fa06-48e9-b129-d0b68864515f-utilities\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.098147 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10dbb454-fa06-48e9-b129-d0b68864515f-catalog-content\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.098186 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdwzk\" (UniqueName: \"kubernetes.io/projected/10dbb454-fa06-48e9-b129-d0b68864515f-kube-api-access-pdwzk\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.098786 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10dbb454-fa06-48e9-b129-d0b68864515f-utilities\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.098804 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10dbb454-fa06-48e9-b129-d0b68864515f-catalog-content\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.112261 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdwzk\" (UniqueName: \"kubernetes.io/projected/10dbb454-fa06-48e9-b129-d0b68864515f-kube-api-access-pdwzk\") pod \"certified-operators-tnlm8\" (UID: \"10dbb454-fa06-48e9-b129-d0b68864515f\") " pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.273997 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.415591 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pkxmh"] Dec 03 12:08:53 crc kubenswrapper[4591]: W1203 12:08:53.423452 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod495b88b0_ab9f_45d4_b257_da87febda2bb.slice/crio-90d763c50530215e736b612f860c57c69651870d0c1b97001beca11734f63fa7 WatchSource:0}: Error finding container 90d763c50530215e736b612f860c57c69651870d0c1b97001beca11734f63fa7: Status 404 returned error can't find the container with id 90d763c50530215e736b612f860c57c69651870d0c1b97001beca11734f63fa7 Dec 03 12:08:53 crc kubenswrapper[4591]: I1203 12:08:53.638695 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tnlm8"] Dec 03 12:08:53 crc kubenswrapper[4591]: W1203 12:08:53.648003 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10dbb454_fa06_48e9_b129_d0b68864515f.slice/crio-eb53c69b281bdef999bb4ffa9b19819ae557bf21b7109c084f40ff10390a170e WatchSource:0}: Error finding container eb53c69b281bdef999bb4ffa9b19819ae557bf21b7109c084f40ff10390a170e: Status 404 returned error can't find the container with id eb53c69b281bdef999bb4ffa9b19819ae557bf21b7109c084f40ff10390a170e Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.058190 4591 generic.go:334] "Generic (PLEG): container finished" podID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" containerID="f0b3677944e604d48a45735cbfdf994f6455ecca5e32b75deff994bc7a9eb64c" exitCode=0 Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.058260 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q2fg9" event={"ID":"afa3d606-fc50-40b2-9b33-1fcc258faf3d","Type":"ContainerDied","Data":"f0b3677944e604d48a45735cbfdf994f6455ecca5e32b75deff994bc7a9eb64c"} Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.060470 4591 generic.go:334] "Generic (PLEG): container finished" podID="495b88b0-ab9f-45d4-b257-da87febda2bb" containerID="15323ffa61fb0ccb34565ef05fc4a58205f144882302e7d9ac8a4faa54714bb8" exitCode=0 Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.060515 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pkxmh" event={"ID":"495b88b0-ab9f-45d4-b257-da87febda2bb","Type":"ContainerDied","Data":"15323ffa61fb0ccb34565ef05fc4a58205f144882302e7d9ac8a4faa54714bb8"} Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.060571 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pkxmh" event={"ID":"495b88b0-ab9f-45d4-b257-da87febda2bb","Type":"ContainerStarted","Data":"90d763c50530215e736b612f860c57c69651870d0c1b97001beca11734f63fa7"} Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.064116 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r27xd" event={"ID":"3b4b9cef-ccab-403d-9dad-5d04a216bc01","Type":"ContainerStarted","Data":"1dfb2ee5610b234e563ba644a8ada7daedf8d279b35ea364a0128aa103ef9aa8"} Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.066107 4591 generic.go:334] "Generic (PLEG): container finished" podID="10dbb454-fa06-48e9-b129-d0b68864515f" containerID="fcc0600054a9eff5fe910a8987ae40960daa4faeeb271089ba3558699f51dbe7" 
exitCode=0 Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.066149 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tnlm8" event={"ID":"10dbb454-fa06-48e9-b129-d0b68864515f","Type":"ContainerDied","Data":"fcc0600054a9eff5fe910a8987ae40960daa4faeeb271089ba3558699f51dbe7"} Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.066168 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tnlm8" event={"ID":"10dbb454-fa06-48e9-b129-d0b68864515f","Type":"ContainerStarted","Data":"eb53c69b281bdef999bb4ffa9b19819ae557bf21b7109c084f40ff10390a170e"} Dec 03 12:08:54 crc kubenswrapper[4591]: I1203 12:08:54.105663 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r27xd" podStartSLOduration=2.596996694 podStartE2EDuration="4.105653426s" podCreationTimestamp="2025-12-03 12:08:50 +0000 UTC" firstStartedPulling="2025-12-03 12:08:52.044997998 +0000 UTC m=+229.472037768" lastFinishedPulling="2025-12-03 12:08:53.55365473 +0000 UTC m=+230.980694500" observedRunningTime="2025-12-03 12:08:54.104328941 +0000 UTC m=+231.531368712" watchObservedRunningTime="2025-12-03 12:08:54.105653426 +0000 UTC m=+231.532693197" Dec 03 12:08:55 crc kubenswrapper[4591]: I1203 12:08:55.076374 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q2fg9" event={"ID":"afa3d606-fc50-40b2-9b33-1fcc258faf3d","Type":"ContainerStarted","Data":"a4e7794ba447310c97e9e25e239dfe50b76ccad99302633036ba7dd24da380fd"} Dec 03 12:08:55 crc kubenswrapper[4591]: I1203 12:08:55.081387 4591 generic.go:334] "Generic (PLEG): container finished" podID="495b88b0-ab9f-45d4-b257-da87febda2bb" containerID="46d8d9d0130ce3b40b7cb844989db5ce10361c61efdf227042cd50bc7144461a" exitCode=0 Dec 03 12:08:55 crc kubenswrapper[4591]: I1203 12:08:55.081493 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pkxmh" event={"ID":"495b88b0-ab9f-45d4-b257-da87febda2bb","Type":"ContainerDied","Data":"46d8d9d0130ce3b40b7cb844989db5ce10361c61efdf227042cd50bc7144461a"} Dec 03 12:08:55 crc kubenswrapper[4591]: I1203 12:08:55.085205 4591 generic.go:334] "Generic (PLEG): container finished" podID="10dbb454-fa06-48e9-b129-d0b68864515f" containerID="af069b278da5735b3acb4a5dfda3d2c0c965044d23aef47347ab1174519e511f" exitCode=0 Dec 03 12:08:55 crc kubenswrapper[4591]: I1203 12:08:55.086175 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tnlm8" event={"ID":"10dbb454-fa06-48e9-b129-d0b68864515f","Type":"ContainerDied","Data":"af069b278da5735b3acb4a5dfda3d2c0c965044d23aef47347ab1174519e511f"} Dec 03 12:08:55 crc kubenswrapper[4591]: I1203 12:08:55.098667 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q2fg9" podStartSLOduration=2.402830018 podStartE2EDuration="5.098646655s" podCreationTimestamp="2025-12-03 12:08:50 +0000 UTC" firstStartedPulling="2025-12-03 12:08:52.043374602 +0000 UTC m=+229.470414372" lastFinishedPulling="2025-12-03 12:08:54.739191239 +0000 UTC m=+232.166231009" observedRunningTime="2025-12-03 12:08:55.093676598 +0000 UTC m=+232.520716368" watchObservedRunningTime="2025-12-03 12:08:55.098646655 +0000 UTC m=+232.525686424" Dec 03 12:08:56 crc kubenswrapper[4591]: I1203 12:08:56.093313 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-pkxmh" event={"ID":"495b88b0-ab9f-45d4-b257-da87febda2bb","Type":"ContainerStarted","Data":"6b7dce3afbf191dc345acac978f946d693f10abfb15e32cbdc8a8fa2aafd1460"} Dec 03 12:08:56 crc kubenswrapper[4591]: I1203 12:08:56.096671 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tnlm8" event={"ID":"10dbb454-fa06-48e9-b129-d0b68864515f","Type":"ContainerStarted","Data":"e33c8c1e1e7250cb7b31191b663be2fcd39cdf9117546943d515ba67d0e23295"} Dec 03 12:08:56 crc kubenswrapper[4591]: I1203 12:08:56.111543 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pkxmh" podStartSLOduration=2.550376445 podStartE2EDuration="4.111519365s" podCreationTimestamp="2025-12-03 12:08:52 +0000 UTC" firstStartedPulling="2025-12-03 12:08:54.061988231 +0000 UTC m=+231.489028001" lastFinishedPulling="2025-12-03 12:08:55.623131152 +0000 UTC m=+233.050170921" observedRunningTime="2025-12-03 12:08:56.10854861 +0000 UTC m=+233.535588380" watchObservedRunningTime="2025-12-03 12:08:56.111519365 +0000 UTC m=+233.538559135" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.710150 4591 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.711762 4591 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.711983 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712132 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b" gracePeriod=15 Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712211 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940" gracePeriod=15 Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712311 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424" gracePeriod=15 Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712426 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea" gracePeriod=15 Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712477 4591 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712625 4591 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591" gracePeriod=15 Dec 03 12:08:57 crc kubenswrapper[4591]: E1203 12:08:57.712791 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712824 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 12:08:57 crc kubenswrapper[4591]: E1203 12:08:57.712847 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712855 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 12:08:57 crc kubenswrapper[4591]: E1203 12:08:57.712867 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712876 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 12:08:57 crc kubenswrapper[4591]: E1203 12:08:57.712885 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712891 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:08:57 crc kubenswrapper[4591]: E1203 12:08:57.712904 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712913 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 12:08:57 crc kubenswrapper[4591]: E1203 12:08:57.712926 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.712933 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.713092 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.713115 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.713125 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.713135 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 12:08:57 crc 
kubenswrapper[4591]: I1203 12:08:57.713145 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 12:08:57 crc kubenswrapper[4591]: E1203 12:08:57.767513 4591 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.19:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877374 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877635 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877673 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877701 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877759 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877776 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877804 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.877830 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.937033 4591 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.937130 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.979669 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.980239 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.980886 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.981059 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.981308 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.981476 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.981571 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.981787 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.981867 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.981967 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.982157 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.982262 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.982304 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.982376 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.982403 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:57 crc kubenswrapper[4591]: I1203 12:08:57.982610 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.068629 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:58 crc kubenswrapper[4591]: W1203 12:08:58.084512 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-9b1d6e4138c1171d2d86b98b0b9713da1380cb40dcae122acd4efe9ae07f5bf3 WatchSource:0}: Error finding container 9b1d6e4138c1171d2d86b98b0b9713da1380cb40dcae122acd4efe9ae07f5bf3: Status 404 returned error can't find the container with id 9b1d6e4138c1171d2d86b98b0b9713da1380cb40dcae122acd4efe9ae07f5bf3 Dec 03 12:08:58 crc kubenswrapper[4591]: E1203 12:08:58.090754 4591 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.19:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187db345091bc5e7 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 12:08:58.086032871 +0000 UTC m=+235.513072642,LastTimestamp:2025-12-03 12:08:58.086032871 +0000 UTC m=+235.513072642,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.108530 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"9b1d6e4138c1171d2d86b98b0b9713da1380cb40dcae122acd4efe9ae07f5bf3"} Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.110428 4591 generic.go:334] "Generic (PLEG): container finished" podID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" containerID="7cc588c9c07146700620c13756329941a167026f0ec2bde94673e0cbfdd723d4" exitCode=0 Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.110487 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c1e47a4-b770-4931-8ae0-f1e70ed1b680","Type":"ContainerDied","Data":"7cc588c9c07146700620c13756329941a167026f0ec2bde94673e0cbfdd723d4"} Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.112087 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.112588 4591 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.114129 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.114648 4591 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424" exitCode=0 Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.114671 4591 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940" exitCode=0 Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.114678 4591 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea" exitCode=0 Dec 03 12:08:58 crc kubenswrapper[4591]: I1203 12:08:58.114687 4591 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591" exitCode=2 Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.121002 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e6f0d347e12d0d715444166ec67fd1aaa86fc94d29387786427f4a40c604d8d4"} Dec 03 12:08:59 crc kubenswrapper[4591]: E1203 12:08:59.123080 4591 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.19:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.123106 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.338086 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.338683 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.502641 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kubelet-dir\") pod \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.502704 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-var-lock\") pod \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.502744 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kube-api-access\") pod \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\" (UID: \"8c1e47a4-b770-4931-8ae0-f1e70ed1b680\") " Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.502774 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8c1e47a4-b770-4931-8ae0-f1e70ed1b680" (UID: "8c1e47a4-b770-4931-8ae0-f1e70ed1b680"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.502872 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-var-lock" (OuterVolumeSpecName: "var-lock") pod "8c1e47a4-b770-4931-8ae0-f1e70ed1b680" (UID: "8c1e47a4-b770-4931-8ae0-f1e70ed1b680"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.503135 4591 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.503157 4591 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.514594 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8c1e47a4-b770-4931-8ae0-f1e70ed1b680" (UID: "8c1e47a4-b770-4931-8ae0-f1e70ed1b680"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:08:59 crc kubenswrapper[4591]: I1203 12:08:59.604424 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c1e47a4-b770-4931-8ae0-f1e70ed1b680-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.068101 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.068845 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.069572 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.069921 4591 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.128735 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c1e47a4-b770-4931-8ae0-f1e70ed1b680","Type":"ContainerDied","Data":"184fb0d49c3f8e93b4e63a673d301e5eb976819c60d89a1c4bba460e897df81e"} Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.128766 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.128781 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="184fb0d49c3f8e93b4e63a673d301e5eb976819c60d89a1c4bba460e897df81e" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.130846 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.131355 4591 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b" exitCode=0 Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.131969 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.132191 4591 scope.go:117] "RemoveContainer" containerID="ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424" Dec 03 12:09:00 crc kubenswrapper[4591]: E1203 12:09:00.132327 4591 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.19:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.139521 4591 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.140047 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.145180 4591 scope.go:117] "RemoveContainer" containerID="32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.156827 4591 scope.go:117] "RemoveContainer" containerID="e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.167773 4591 scope.go:117] "RemoveContainer" containerID="6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.177732 4591 scope.go:117] "RemoveContainer" containerID="b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.188296 4591 scope.go:117] "RemoveContainer" containerID="7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.200829 4591 scope.go:117] "RemoveContainer" containerID="ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424" Dec 03 12:09:00 crc kubenswrapper[4591]: E1203 12:09:00.201242 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\": container with ID starting with ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424 not found: ID does not exist" containerID="ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.201280 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424"} err="failed to get container status \"ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\": rpc error: code = NotFound desc = could not find container \"ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424\": container with ID starting with ec4f1aa0e87b2722f0d911bf5d1aef8d191e5056b1578f1b550c6c9a26ffd424 not found: ID does not exist" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 
12:09:00.201307 4591 scope.go:117] "RemoveContainer" containerID="32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940" Dec 03 12:09:00 crc kubenswrapper[4591]: E1203 12:09:00.201587 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\": container with ID starting with 32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940 not found: ID does not exist" containerID="32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.201616 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940"} err="failed to get container status \"32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\": rpc error: code = NotFound desc = could not find container \"32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940\": container with ID starting with 32568a7bd7e34ec9e7c9cd1521a995b0793d3d883b4a1b1f900cd4badd763940 not found: ID does not exist" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.201637 4591 scope.go:117] "RemoveContainer" containerID="e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea" Dec 03 12:09:00 crc kubenswrapper[4591]: E1203 12:09:00.201867 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\": container with ID starting with e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea not found: ID does not exist" containerID="e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.201894 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea"} err="failed to get container status \"e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\": rpc error: code = NotFound desc = could not find container \"e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea\": container with ID starting with e7e65230fdd5bacb8cb0296641ec9c425251a18ed293180a82c7420c43a6aeea not found: ID does not exist" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.201911 4591 scope.go:117] "RemoveContainer" containerID="6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591" Dec 03 12:09:00 crc kubenswrapper[4591]: E1203 12:09:00.202187 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\": container with ID starting with 6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591 not found: ID does not exist" containerID="6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.202214 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591"} err="failed to get container status \"6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\": rpc error: code = NotFound desc = could not find container \"6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591\": container with ID 
starting with 6cfc136f428983f0e87ac0b730ab8ffccffe19595f6c73a95c49afadf466f591 not found: ID does not exist" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.202228 4591 scope.go:117] "RemoveContainer" containerID="b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b" Dec 03 12:09:00 crc kubenswrapper[4591]: E1203 12:09:00.202539 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\": container with ID starting with b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b not found: ID does not exist" containerID="b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.202569 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b"} err="failed to get container status \"b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\": rpc error: code = NotFound desc = could not find container \"b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b\": container with ID starting with b39fd031a7b3e7da59871aa85b96fd84fa4409de28417f09a0ee6e86217b556b not found: ID does not exist" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.202584 4591 scope.go:117] "RemoveContainer" containerID="7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf" Dec 03 12:09:00 crc kubenswrapper[4591]: E1203 12:09:00.202809 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\": container with ID starting with 7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf not found: ID does not exist" containerID="7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.202834 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf"} err="failed to get container status \"7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\": rpc error: code = NotFound desc = could not find container \"7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf\": container with ID starting with 7fd6c8e6f5df8cda81a83bec242fd9e0a8edea38641361a18e94d975c2031ddf not found: ID does not exist" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212361 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212397 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212446 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212484 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212510 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212609 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212740 4591 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212763 4591 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.212773 4591 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.445033 4591 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.445545 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.633799 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.633921 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.680583 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.680933 4591 status_manager.go:851] "Failed to get status for pod" 
podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.681220 4591 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.681449 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.833205 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.833266 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.871124 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.871612 4591 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.872992 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.873341 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.873556 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:00 crc kubenswrapper[4591]: I1203 12:09:00.896216 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.178100 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-r27xd" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.178417 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q2fg9" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.178469 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.178830 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.179086 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.179393 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.179706 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:01 crc kubenswrapper[4591]: I1203 12:09:01.180005 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:02 crc kubenswrapper[4591]: I1203 12:09:02.892628 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:02 crc kubenswrapper[4591]: I1203 12:09:02.892891 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:02 crc kubenswrapper[4591]: I1203 12:09:02.893468 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.052758 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.053099 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.087304 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.087758 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.088032 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.088251 4591 status_manager.go:851] "Failed to get status for pod" podUID="495b88b0-ab9f-45d4-b257-da87febda2bb" pod="openshift-marketplace/community-operators-pkxmh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pkxmh\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.088411 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.183441 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pkxmh" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.183809 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.184149 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.184463 4591 status_manager.go:851] "Failed to get status for pod" podUID="495b88b0-ab9f-45d4-b257-da87febda2bb" 
pod="openshift-marketplace/community-operators-pkxmh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pkxmh\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.184757 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.274929 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.274993 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.308866 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.309377 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.309740 4591 status_manager.go:851] "Failed to get status for pod" podUID="495b88b0-ab9f-45d4-b257-da87febda2bb" pod="openshift-marketplace/community-operators-pkxmh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pkxmh\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.310130 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.310457 4591 status_manager.go:851] "Failed to get status for pod" podUID="10dbb454-fa06-48e9-b129-d0b68864515f" pod="openshift-marketplace/certified-operators-tnlm8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-tnlm8\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:03 crc kubenswrapper[4591]: I1203 12:09:03.310775 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:04 crc kubenswrapper[4591]: I1203 12:09:04.192863 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tnlm8" Dec 03 12:09:04 crc kubenswrapper[4591]: I1203 12:09:04.193652 4591 status_manager.go:851] "Failed to get status for pod" podUID="495b88b0-ab9f-45d4-b257-da87febda2bb" 
pod="openshift-marketplace/community-operators-pkxmh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pkxmh\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:04 crc kubenswrapper[4591]: I1203 12:09:04.194081 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:04 crc kubenswrapper[4591]: I1203 12:09:04.194470 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:04 crc kubenswrapper[4591]: I1203 12:09:04.194719 4591 status_manager.go:851] "Failed to get status for pod" podUID="10dbb454-fa06-48e9-b129-d0b68864515f" pod="openshift-marketplace/certified-operators-tnlm8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-tnlm8\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:04 crc kubenswrapper[4591]: I1203 12:09:04.194994 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:05 crc kubenswrapper[4591]: E1203 12:09:05.227772 4591 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.19:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187db345091bc5e7 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 12:08:58.086032871 +0000 UTC m=+235.513072642,LastTimestamp:2025-12-03 12:08:58.086032871 +0000 UTC m=+235.513072642,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.189555 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:09:07Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:09:07Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:09:07Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:09:07Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.189798 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.189982 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.190164 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.190311 4591 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.190323 4591 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.237010 4591 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.237284 4591 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.237491 4591 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.237848 4591 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.238082 4591 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:07 crc kubenswrapper[4591]: I1203 12:09:07.238112 4591 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.238349 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="200ms" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.439224 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="400ms" Dec 03 12:09:07 crc kubenswrapper[4591]: E1203 12:09:07.840621 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="800ms" Dec 03 12:09:08 crc kubenswrapper[4591]: E1203 12:09:08.642287 4591 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.19:6443: connect: connection refused" interval="1.6s" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.889954 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.890600 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.890850 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.891049 4591 status_manager.go:851] "Failed to get status for pod" podUID="495b88b0-ab9f-45d4-b257-da87febda2bb" pod="openshift-marketplace/community-operators-pkxmh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pkxmh\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.891451 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.891857 4591 status_manager.go:851] "Failed to get status for pod" podUID="10dbb454-fa06-48e9-b129-d0b68864515f" pod="openshift-marketplace/certified-operators-tnlm8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-tnlm8\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.904491 4591 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.904526 4591 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:08 crc kubenswrapper[4591]: E1203 12:09:08.904973 4591 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:08 crc kubenswrapper[4591]: I1203 12:09:08.905826 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:08 crc kubenswrapper[4591]: W1203 12:09:08.931550 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-36a9c5bc40c20bd3f1f1d8dc54cb8fb170ad52d34506452d54cd7a17b12a3968 WatchSource:0}: Error finding container 36a9c5bc40c20bd3f1f1d8dc54cb8fb170ad52d34506452d54cd7a17b12a3968: Status 404 returned error can't find the container with id 36a9c5bc40c20bd3f1f1d8dc54cb8fb170ad52d34506452d54cd7a17b12a3968 Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.197075 4591 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="e3972e4e311d0c9a9c9d29c8163a53dc94e74df05630e8df6f37b8ba19f57b1a" exitCode=0 Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.197112 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"e3972e4e311d0c9a9c9d29c8163a53dc94e74df05630e8df6f37b8ba19f57b1a"} Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.197177 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"36a9c5bc40c20bd3f1f1d8dc54cb8fb170ad52d34506452d54cd7a17b12a3968"} Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.197481 4591 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.197507 4591 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:09 crc kubenswrapper[4591]: E1203 12:09:09.197792 4591 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.197821 4591 status_manager.go:851] "Failed to get status for pod" podUID="10dbb454-fa06-48e9-b129-d0b68864515f" pod="openshift-marketplace/certified-operators-tnlm8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-tnlm8\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.198118 4591 status_manager.go:851] "Failed to get status for pod" podUID="3b4b9cef-ccab-403d-9dad-5d04a216bc01" pod="openshift-marketplace/redhat-marketplace-r27xd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r27xd\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.198372 4591 status_manager.go:851] "Failed to get status for pod" podUID="afa3d606-fc50-40b2-9b33-1fcc258faf3d" pod="openshift-marketplace/redhat-operators-q2fg9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-q2fg9\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.198628 4591 status_manager.go:851] "Failed to get status for 
pod" podUID="495b88b0-ab9f-45d4-b257-da87febda2bb" pod="openshift-marketplace/community-operators-pkxmh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-pkxmh\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:09 crc kubenswrapper[4591]: I1203 12:09:09.198826 4591 status_manager.go:851] "Failed to get status for pod" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.19:6443: connect: connection refused" Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205362 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a2a0f8c4735ccc20b914ea03308212c2b69a120727b50ae7923597e2e9d7ddf3"} Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205420 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"890922c58c75c4006b2d7b9e3c7becc33ac8e00e9117880feb2e182123e08639"} Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205433 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ec6a76b1f119a20ec0bfea1821a4f380878afc7821f061b11873a8fad1137c6d"} Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205442 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8efab2759126489ce7fdeddb1d1a9b8ce16796d9a748e2ffd4fbde87ec84c951"} Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205450 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f932a1bce2318197b8a28ea4c6a8a780b365ce3334e54525886d437f9ebdf1d9"} Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205764 4591 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205780 4591 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:10 crc kubenswrapper[4591]: I1203 12:09:10.205902 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:13 crc kubenswrapper[4591]: I1203 12:09:13.231442 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 12:09:13 crc kubenswrapper[4591]: I1203 12:09:13.231776 4591 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99" exitCode=1 Dec 03 12:09:13 crc kubenswrapper[4591]: I1203 12:09:13.231813 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99"} Dec 03 12:09:13 crc kubenswrapper[4591]: I1203 12:09:13.232236 4591 scope.go:117] "RemoveContainer" containerID="e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99" Dec 03 12:09:13 crc kubenswrapper[4591]: I1203 12:09:13.906786 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:13 crc kubenswrapper[4591]: I1203 12:09:13.907281 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:13 crc kubenswrapper[4591]: I1203 12:09:13.911044 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:14 crc kubenswrapper[4591]: I1203 12:09:14.243161 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 12:09:14 crc kubenswrapper[4591]: I1203 12:09:14.243223 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9c5bbb956db12c6b6e5185fc711c44913cf1834e9a29cfdbaf45d01d900a3da6"} Dec 03 12:09:15 crc kubenswrapper[4591]: I1203 12:09:15.764904 4591 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:15 crc kubenswrapper[4591]: I1203 12:09:15.876813 4591 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c97fc9de-b9f2-41ec-9a4c-5bd3f0d361c2" Dec 03 12:09:16 crc kubenswrapper[4591]: I1203 12:09:16.256930 4591 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:16 crc kubenswrapper[4591]: I1203 12:09:16.257240 4591 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:16 crc kubenswrapper[4591]: I1203 12:09:16.259521 4591 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c97fc9de-b9f2-41ec-9a4c-5bd3f0d361c2" Dec 03 12:09:16 crc kubenswrapper[4591]: I1203 12:09:16.260401 4591 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://f932a1bce2318197b8a28ea4c6a8a780b365ce3334e54525886d437f9ebdf1d9" Dec 03 12:09:16 crc kubenswrapper[4591]: I1203 12:09:16.260436 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:17 crc kubenswrapper[4591]: I1203 12:09:17.262680 4591 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:17 crc kubenswrapper[4591]: I1203 12:09:17.262710 4591 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8e9091b9-cf99-4097-a1a6-98adef19bc6f" Dec 03 12:09:17 crc 
kubenswrapper[4591]: I1203 12:09:17.265438 4591 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c97fc9de-b9f2-41ec-9a4c-5bd3f0d361c2" Dec 03 12:09:22 crc kubenswrapper[4591]: I1203 12:09:22.064580 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:09:22 crc kubenswrapper[4591]: I1203 12:09:22.190478 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 12:09:22 crc kubenswrapper[4591]: I1203 12:09:22.239273 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 12:09:22 crc kubenswrapper[4591]: I1203 12:09:22.441469 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 12:09:22 crc kubenswrapper[4591]: I1203 12:09:22.725774 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 12:09:22 crc kubenswrapper[4591]: I1203 12:09:22.756086 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 12:09:23 crc kubenswrapper[4591]: I1203 12:09:23.005936 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 12:09:23 crc kubenswrapper[4591]: I1203 12:09:23.204954 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:09:23 crc kubenswrapper[4591]: I1203 12:09:23.205161 4591 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 12:09:23 crc kubenswrapper[4591]: I1203 12:09:23.205213 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 12:09:23 crc kubenswrapper[4591]: I1203 12:09:23.358889 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 12:09:23 crc kubenswrapper[4591]: I1203 12:09:23.787213 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 12:09:23 crc kubenswrapper[4591]: I1203 12:09:23.826842 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 12:09:24 crc kubenswrapper[4591]: I1203 12:09:24.042165 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 12:09:24 crc kubenswrapper[4591]: I1203 12:09:24.089381 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 12:09:24 crc kubenswrapper[4591]: I1203 12:09:24.191987 4591 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 12:09:24 crc kubenswrapper[4591]: I1203 12:09:24.389620 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 12:09:24 crc kubenswrapper[4591]: I1203 12:09:24.549518 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 12:09:24 crc kubenswrapper[4591]: I1203 12:09:24.926233 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 12:09:25 crc kubenswrapper[4591]: I1203 12:09:25.070758 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 12:09:25 crc kubenswrapper[4591]: I1203 12:09:25.110642 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 12:09:25 crc kubenswrapper[4591]: I1203 12:09:25.335618 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 12:09:25 crc kubenswrapper[4591]: I1203 12:09:25.635568 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 12:09:26 crc kubenswrapper[4591]: I1203 12:09:26.385102 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 12:09:26 crc kubenswrapper[4591]: I1203 12:09:26.497750 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 12:09:26 crc kubenswrapper[4591]: I1203 12:09:26.526047 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 12:09:26 crc kubenswrapper[4591]: I1203 12:09:26.710891 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 12:09:27 crc kubenswrapper[4591]: I1203 12:09:27.155430 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 12:09:27 crc kubenswrapper[4591]: I1203 12:09:27.530856 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 12:09:28 crc kubenswrapper[4591]: I1203 12:09:28.258015 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 12:09:28 crc kubenswrapper[4591]: I1203 12:09:28.521672 4591 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 12:09:28 crc kubenswrapper[4591]: I1203 12:09:28.831561 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 12:09:28 crc kubenswrapper[4591]: I1203 12:09:28.931132 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 12:09:28 crc kubenswrapper[4591]: I1203 12:09:28.970272 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 12:09:29 crc 
kubenswrapper[4591]: I1203 12:09:29.045326 4591 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.119188 4591 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.504189 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.509327 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.603666 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.669445 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.760333 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.855350 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.892386 4591 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 12:09:29 crc kubenswrapper[4591]: I1203 12:09:29.984680 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 12:09:30 crc kubenswrapper[4591]: I1203 12:09:30.016243 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 12:09:30 crc kubenswrapper[4591]: I1203 12:09:30.252006 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 12:09:30 crc kubenswrapper[4591]: I1203 12:09:30.270358 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:09:30 crc kubenswrapper[4591]: I1203 12:09:30.483686 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 12:09:30 crc kubenswrapper[4591]: I1203 12:09:30.552816 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 12:09:30 crc kubenswrapper[4591]: I1203 12:09:30.692001 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 12:09:30 crc kubenswrapper[4591]: I1203 12:09:30.887856 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.029096 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.104938 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 
12:09:31.198886 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.216849 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.369326 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.436497 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.655217 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.702115 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.715462 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.724098 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.740688 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.815705 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.932982 4591 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.937385 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tnlm8" podStartSLOduration=38.401779827 podStartE2EDuration="39.937358556s" podCreationTimestamp="2025-12-03 12:08:52 +0000 UTC" firstStartedPulling="2025-12-03 12:08:54.067270342 +0000 UTC m=+231.494310113" lastFinishedPulling="2025-12-03 12:08:55.602849072 +0000 UTC m=+233.029888842" observedRunningTime="2025-12-03 12:08:56.128750198 +0000 UTC m=+233.555789968" watchObservedRunningTime="2025-12-03 12:09:31.937358556 +0000 UTC m=+269.364398326" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.938540 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.938606 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.944989 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.961484 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=16.961466798 podStartE2EDuration="16.961466798s" podCreationTimestamp="2025-12-03 12:09:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:09:31.959382983 +0000 UTC m=+269.386422763" watchObservedRunningTime="2025-12-03 12:09:31.961466798 +0000 UTC m=+269.388506568" Dec 03 12:09:31 crc kubenswrapper[4591]: I1203 12:09:31.992354 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.057324 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.057375 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.237935 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.403985 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.458794 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.558847 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.611014 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.612593 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.701251 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.794427 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.811505 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.826169 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 12:09:32 crc kubenswrapper[4591]: I1203 12:09:32.996592 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.077464 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.205033 4591 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.205102 4591 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.243243 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.261569 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.327985 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.401825 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.462514 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.480829 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.483972 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.525161 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.527121 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.534168 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.584039 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.610656 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.647441 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.716380 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.723213 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.738752 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.761200 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 
12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.811609 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.834924 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.838533 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.861687 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.949811 4591 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 12:09:33 crc kubenswrapper[4591]: I1203 12:09:33.959003 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.066716 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.075553 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.168280 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.222662 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.231385 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.260312 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.280379 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.299383 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.394659 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.408924 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.423095 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.467774 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.552605 4591 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-stats-default" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.603025 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.654697 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.680897 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.827754 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.897084 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 12:09:34 crc kubenswrapper[4591]: I1203 12:09:34.933103 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.067865 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.068132 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.152202 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.276556 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.306123 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.404556 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.470354 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.533763 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.572863 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.576393 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.655568 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.685170 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.700586 4591 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.713736 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.796149 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.885219 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 12:09:35 crc kubenswrapper[4591]: I1203 12:09:35.911036 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.105895 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.133715 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.158269 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.269997 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.279731 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.284158 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.289316 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.316962 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.336351 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.453109 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.503046 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.511862 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.574038 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.734691 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.759816 4591 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.920439 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.944575 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 12:09:36 crc kubenswrapper[4591]: I1203 12:09:36.948960 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.128517 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.193282 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.201177 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.217048 4591 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.217326 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://e6f0d347e12d0d715444166ec67fd1aaa86fc94d29387786427f4a40c604d8d4" gracePeriod=5 Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.257082 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.299347 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.398390 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.415253 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.422242 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.475046 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.502042 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.508377 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.648958 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.671316 4591 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.975637 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 12:09:37 crc kubenswrapper[4591]: I1203 12:09:37.992319 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.013054 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.102312 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.261030 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.266769 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.442568 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.451961 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.530286 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.569939 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.632969 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.671902 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.686378 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.697397 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.777129 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.807057 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.859150 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:09:38 crc kubenswrapper[4591]: I1203 12:09:38.964885 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.245033 4591 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.289056 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.300099 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.307206 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.395524 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.397571 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.572578 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.780333 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.883888 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 12:09:39 crc kubenswrapper[4591]: I1203 12:09:39.923769 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.232295 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.245167 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.435363 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.436231 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.471761 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.557110 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.624515 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.660896 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.668159 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.722191 4591 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.811308 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.845252 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.869267 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 12:09:40 crc kubenswrapper[4591]: I1203 12:09:40.907265 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.079323 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.305948 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.330978 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.340105 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.534855 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.596856 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.612847 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.665345 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.772263 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.786546 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.916309 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 12:09:41 crc kubenswrapper[4591]: I1203 12:09:41.921496 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.065988 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.129794 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.159095 4591 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.275207 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.378563 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.396359 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.396404 4591 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="e6f0d347e12d0d715444166ec67fd1aaa86fc94d29387786427f4a40c604d8d4" exitCode=137 Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.442917 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.631360 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.637446 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.759208 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.779447 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.779537 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.784552 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878211 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878264 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878319 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878313 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878340 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878361 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878389 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878429 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878523 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). 
InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878788 4591 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878808 4591 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878818 4591 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.878830 4591 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.886670 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.897219 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.905279 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.918839 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.948792 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.963817 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 12:09:42 crc kubenswrapper[4591]: I1203 12:09:42.981471 4591 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.019139 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.098921 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.181495 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.205322 4591 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager 
namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.205389 4591 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.205451 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.206001 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"9c5bbb956db12c6b6e5185fc711c44913cf1834e9a29cfdbaf45d01d900a3da6"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.206134 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://9c5bbb956db12c6b6e5185fc711c44913cf1834e9a29cfdbaf45d01d900a3da6" gracePeriod=30 Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.282204 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.404490 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.406218 4591 scope.go:117] "RemoveContainer" containerID="e6f0d347e12d0d715444166ec67fd1aaa86fc94d29387786427f4a40c604d8d4" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.406321 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.551296 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.649382 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.678347 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.738517 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 12:09:43 crc kubenswrapper[4591]: I1203 12:09:43.891244 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.029849 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.040414 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.109772 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.131858 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.170595 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.417155 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.498118 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 12:09:44 crc kubenswrapper[4591]: I1203 12:09:44.661726 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 12:09:45 crc kubenswrapper[4591]: I1203 12:09:45.167460 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 12:09:45 crc kubenswrapper[4591]: I1203 12:09:45.985155 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.852697 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd"] Dec 03 12:10:09 crc kubenswrapper[4591]: E1203 12:10:09.853381 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.853393 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 12:10:09 crc kubenswrapper[4591]: E1203 12:10:09.853401 4591 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" containerName="installer" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.853407 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" containerName="installer" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.853506 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c1e47a4-b770-4931-8ae0-f1e70ed1b680" containerName="installer" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.853520 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.853910 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.857793 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-dockercfg-wwt9l" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.858094 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"telemetry-config" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.858151 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"openshift-service-ca.crt" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.858488 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-tls" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.865294 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-root-ca.crt" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.865978 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd"] Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.936418 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jhxp\" (UniqueName: \"kubernetes.io/projected/af16b7bc-797a-4b13-89f0-68b0e8f2915d-kube-api-access-5jhxp\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.936501 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/af16b7bc-797a-4b13-89f0-68b0e8f2915d-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:09 crc kubenswrapper[4591]: I1203 12:10:09.936636 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/af16b7bc-797a-4b13-89f0-68b0e8f2915d-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.037761 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/af16b7bc-797a-4b13-89f0-68b0e8f2915d-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.037851 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/af16b7bc-797a-4b13-89f0-68b0e8f2915d-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.037899 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jhxp\" (UniqueName: \"kubernetes.io/projected/af16b7bc-797a-4b13-89f0-68b0e8f2915d-kube-api-access-5jhxp\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.042685 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/af16b7bc-797a-4b13-89f0-68b0e8f2915d-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.052372 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/af16b7bc-797a-4b13-89f0-68b0e8f2915d-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.056286 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jhxp\" (UniqueName: \"kubernetes.io/projected/af16b7bc-797a-4b13-89f0-68b0e8f2915d-kube-api-access-5jhxp\") pod \"cluster-monitoring-operator-6d5b84845-lftbd\" (UID: \"af16b7bc-797a-4b13-89f0-68b0e8f2915d\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.168477 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" Dec 03 12:10:10 crc kubenswrapper[4591]: I1203 12:10:10.543301 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd"] Dec 03 12:10:11 crc kubenswrapper[4591]: I1203 12:10:11.587122 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" event={"ID":"af16b7bc-797a-4b13-89f0-68b0e8f2915d","Type":"ContainerStarted","Data":"bf3fbe2976345aecaaea820ab32c0959aba49a6ee4ca6bea3c64ae0f5de1b69b"} Dec 03 12:10:12 crc kubenswrapper[4591]: I1203 12:10:12.595275 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" event={"ID":"af16b7bc-797a-4b13-89f0-68b0e8f2915d","Type":"ContainerStarted","Data":"d27c2f6a9eb98c1ef70edba3e9caa7f6534f57dadc5cfb235120786b878e399e"} Dec 03 12:10:12 crc kubenswrapper[4591]: I1203 12:10:12.614592 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-lftbd" podStartSLOduration=1.7508268280000001 podStartE2EDuration="3.614572849s" podCreationTimestamp="2025-12-03 12:10:09 +0000 UTC" firstStartedPulling="2025-12-03 12:10:10.552808586 +0000 UTC m=+307.979848357" lastFinishedPulling="2025-12-03 12:10:12.416554608 +0000 UTC m=+309.843594378" observedRunningTime="2025-12-03 12:10:12.609836107 +0000 UTC m=+310.036875877" watchObservedRunningTime="2025-12-03 12:10:12.614572849 +0000 UTC m=+310.041612619" Dec 03 12:10:12 crc kubenswrapper[4591]: I1203 12:10:12.923750 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t"] Dec 03 12:10:12 crc kubenswrapper[4591]: I1203 12:10:12.924468 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:12 crc kubenswrapper[4591]: I1203 12:10:12.926013 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-tls" Dec 03 12:10:12 crc kubenswrapper[4591]: I1203 12:10:12.932532 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t"] Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.078550 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.180937 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:13 crc kubenswrapper[4591]: E1203 12:10:13.181150 4591 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:13 crc kubenswrapper[4591]: E1203 12:10:13.181241 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates podName:f2f2077f-a0e2-407a-837b-6a90ae5f7ebe nodeName:}" failed. No retries permitted until 2025-12-03 12:10:13.681220598 +0000 UTC m=+311.108260367 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-6lr5t" (UID: "f2f2077f-a0e2-407a-837b-6a90ae5f7ebe") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.604221 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.605965 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.606016 4591 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="9c5bbb956db12c6b6e5185fc711c44913cf1834e9a29cfdbaf45d01d900a3da6" exitCode=137 Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.606116 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"9c5bbb956db12c6b6e5185fc711c44913cf1834e9a29cfdbaf45d01d900a3da6"} Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.606180 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9e5bbeb5f74f226efd825a7e9d813d5b71c836d878fef1264218b31b6ffc9987"} Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.606201 4591 scope.go:117] "RemoveContainer" containerID="e233816c191a82857b12b61ea780767d00a63edfb89049275c89eacdfe3f7f99" Dec 03 12:10:13 crc kubenswrapper[4591]: I1203 12:10:13.689957 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:13 crc kubenswrapper[4591]: E1203 12:10:13.690206 4591 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:13 crc kubenswrapper[4591]: E1203 12:10:13.690503 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates podName:f2f2077f-a0e2-407a-837b-6a90ae5f7ebe nodeName:}" failed. No retries permitted until 2025-12-03 12:10:14.690462711 +0000 UTC m=+312.117502481 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-6lr5t" (UID: "f2f2077f-a0e2-407a-837b-6a90ae5f7ebe") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:14 crc kubenswrapper[4591]: I1203 12:10:14.614436 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 03 12:10:14 crc kubenswrapper[4591]: I1203 12:10:14.704737 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:14 crc kubenswrapper[4591]: E1203 12:10:14.704919 4591 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:14 crc kubenswrapper[4591]: E1203 12:10:14.704998 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates podName:f2f2077f-a0e2-407a-837b-6a90ae5f7ebe nodeName:}" failed. No retries permitted until 2025-12-03 12:10:16.70497671 +0000 UTC m=+314.132016480 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-6lr5t" (UID: "f2f2077f-a0e2-407a-837b-6a90ae5f7ebe") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:16 crc kubenswrapper[4591]: I1203 12:10:16.730489 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:16 crc kubenswrapper[4591]: E1203 12:10:16.730711 4591 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:16 crc kubenswrapper[4591]: E1203 12:10:16.731327 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates podName:f2f2077f-a0e2-407a-837b-6a90ae5f7ebe nodeName:}" failed. No retries permitted until 2025-12-03 12:10:20.731306918 +0000 UTC m=+318.158346688 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-6lr5t" (UID: "f2f2077f-a0e2-407a-837b-6a90ae5f7ebe") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:20 crc kubenswrapper[4591]: I1203 12:10:20.786051 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:20 crc kubenswrapper[4591]: E1203 12:10:20.786282 4591 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:20 crc kubenswrapper[4591]: E1203 12:10:20.786477 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates podName:f2f2077f-a0e2-407a-837b-6a90ae5f7ebe nodeName:}" failed. No retries permitted until 2025-12-03 12:10:28.786450408 +0000 UTC m=+326.213490178 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-6lr5t" (UID: "f2f2077f-a0e2-407a-837b-6a90ae5f7ebe") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:22 crc kubenswrapper[4591]: I1203 12:10:22.064283 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:10:23 crc kubenswrapper[4591]: I1203 12:10:23.204849 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:10:23 crc kubenswrapper[4591]: I1203 12:10:23.208579 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:10:23 crc kubenswrapper[4591]: I1203 12:10:23.669767 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:10:25 crc kubenswrapper[4591]: I1203 12:10:25.299837 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:10:25 crc kubenswrapper[4591]: I1203 12:10:25.300165 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:10:28 crc kubenswrapper[4591]: I1203 12:10:28.790396 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod 
\"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:28 crc kubenswrapper[4591]: E1203 12:10:28.790625 4591 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:28 crc kubenswrapper[4591]: E1203 12:10:28.790961 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates podName:f2f2077f-a0e2-407a-837b-6a90ae5f7ebe nodeName:}" failed. No retries permitted until 2025-12-03 12:10:44.790929784 +0000 UTC m=+342.217969554 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-6lr5t" (UID: "f2f2077f-a0e2-407a-837b-6a90ae5f7ebe") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:31 crc kubenswrapper[4591]: I1203 12:10:31.937555 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78vjm"] Dec 03 12:10:31 crc kubenswrapper[4591]: I1203 12:10:31.938051 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" podUID="35f7aa4d-e222-4ee7-a1a3-93a226933fd6" containerName="controller-manager" containerID="cri-o://19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d" gracePeriod=30 Dec 03 12:10:31 crc kubenswrapper[4591]: I1203 12:10:31.940747 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv"] Dec 03 12:10:31 crc kubenswrapper[4591]: I1203 12:10:31.940986 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" podUID="4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" containerName="route-controller-manager" containerID="cri-o://84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d" gracePeriod=30 Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.270557 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.275886 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433015 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-client-ca\") pod \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433103 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvzh2\" (UniqueName: \"kubernetes.io/projected/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-kube-api-access-kvzh2\") pod \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433131 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-proxy-ca-bundles\") pod \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433169 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-252ng\" (UniqueName: \"kubernetes.io/projected/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-kube-api-access-252ng\") pod \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433199 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-client-ca\") pod \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433218 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-serving-cert\") pod \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433261 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-config\") pod \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433303 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-serving-cert\") pod \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\" (UID: \"35f7aa4d-e222-4ee7-a1a3-93a226933fd6\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433355 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-config\") pod \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\" (UID: \"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a\") " Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433785 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-client-ca" (OuterVolumeSpecName: "client-ca") pod "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" 
(UID: "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.433968 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-config" (OuterVolumeSpecName: "config") pod "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" (UID: "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.434154 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-client-ca" (OuterVolumeSpecName: "client-ca") pod "35f7aa4d-e222-4ee7-a1a3-93a226933fd6" (UID: "35f7aa4d-e222-4ee7-a1a3-93a226933fd6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.434304 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "35f7aa4d-e222-4ee7-a1a3-93a226933fd6" (UID: "35f7aa4d-e222-4ee7-a1a3-93a226933fd6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.434337 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-config" (OuterVolumeSpecName: "config") pod "35f7aa4d-e222-4ee7-a1a3-93a226933fd6" (UID: "35f7aa4d-e222-4ee7-a1a3-93a226933fd6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.446215 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "35f7aa4d-e222-4ee7-a1a3-93a226933fd6" (UID: "35f7aa4d-e222-4ee7-a1a3-93a226933fd6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.446241 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-kube-api-access-252ng" (OuterVolumeSpecName: "kube-api-access-252ng") pod "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" (UID: "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a"). InnerVolumeSpecName "kube-api-access-252ng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.446299 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" (UID: "4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.446653 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-kube-api-access-kvzh2" (OuterVolumeSpecName: "kube-api-access-kvzh2") pod "35f7aa4d-e222-4ee7-a1a3-93a226933fd6" (UID: "35f7aa4d-e222-4ee7-a1a3-93a226933fd6"). 
InnerVolumeSpecName "kube-api-access-kvzh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535549 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-252ng\" (UniqueName: \"kubernetes.io/projected/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-kube-api-access-252ng\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535580 4591 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535593 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535604 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535614 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535623 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535632 4591 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535642 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvzh2\" (UniqueName: \"kubernetes.io/projected/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-kube-api-access-kvzh2\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.535650 4591 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35f7aa4d-e222-4ee7-a1a3-93a226933fd6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.714927 4591 generic.go:334] "Generic (PLEG): container finished" podID="4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" containerID="84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d" exitCode=0 Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.715050 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.715053 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" event={"ID":"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a","Type":"ContainerDied","Data":"84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d"} Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.715761 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv" event={"ID":"4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a","Type":"ContainerDied","Data":"b3946a75c0b755beddb19262694ef47e7a3768386fa55e885858cf15ec926d26"} Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.715800 4591 scope.go:117] "RemoveContainer" containerID="84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.718781 4591 generic.go:334] "Generic (PLEG): container finished" podID="35f7aa4d-e222-4ee7-a1a3-93a226933fd6" containerID="19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d" exitCode=0 Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.718864 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" event={"ID":"35f7aa4d-e222-4ee7-a1a3-93a226933fd6","Type":"ContainerDied","Data":"19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d"} Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.718910 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" event={"ID":"35f7aa4d-e222-4ee7-a1a3-93a226933fd6","Type":"ContainerDied","Data":"a9b64483411b3d5c8f32113df008b05e98833cfe5f0bbbd85400110a0c97c679"} Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.718996 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-78vjm" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.734139 4591 scope.go:117] "RemoveContainer" containerID="84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d" Dec 03 12:10:32 crc kubenswrapper[4591]: E1203 12:10:32.734923 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d\": container with ID starting with 84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d not found: ID does not exist" containerID="84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.735014 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d"} err="failed to get container status \"84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d\": rpc error: code = NotFound desc = could not find container \"84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d\": container with ID starting with 84ba80c4c6c75ef0f512be44664573f3a5d2db8b57f6ea78239cf1be9c9aa77d not found: ID does not exist" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.735167 4591 scope.go:117] "RemoveContainer" containerID="19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.755513 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv"] Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.760939 4591 scope.go:117] "RemoveContainer" containerID="19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d" Dec 03 12:10:32 crc kubenswrapper[4591]: E1203 12:10:32.761319 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d\": container with ID starting with 19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d not found: ID does not exist" containerID="19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.761376 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d"} err="failed to get container status \"19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d\": rpc error: code = NotFound desc = could not find container \"19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d\": container with ID starting with 19708a40f69d7d8c368dd3d5169e6ba0abed0aa3df8aa3b00b4e27f98bd44c2d not found: ID does not exist" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.764197 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r9rbv"] Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.767674 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78vjm"] Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.771232 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-78vjm"] Dec 03 12:10:32 crc 
kubenswrapper[4591]: I1203 12:10:32.897886 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35f7aa4d-e222-4ee7-a1a3-93a226933fd6" path="/var/lib/kubelet/pods/35f7aa4d-e222-4ee7-a1a3-93a226933fd6/volumes" Dec 03 12:10:32 crc kubenswrapper[4591]: I1203 12:10:32.898652 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" path="/var/lib/kubelet/pods/4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a/volumes" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.043251 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-75bb457f55-mv2lg"] Dec 03 12:10:33 crc kubenswrapper[4591]: E1203 12:10:33.043487 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f7aa4d-e222-4ee7-a1a3-93a226933fd6" containerName="controller-manager" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.043502 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f7aa4d-e222-4ee7-a1a3-93a226933fd6" containerName="controller-manager" Dec 03 12:10:33 crc kubenswrapper[4591]: E1203 12:10:33.043519 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" containerName="route-controller-manager" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.043526 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" containerName="route-controller-manager" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.043620 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7d77fe-8f25-4c1f-8d0d-6a8413a32e9a" containerName="route-controller-manager" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.043634 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f7aa4d-e222-4ee7-a1a3-93a226933fd6" containerName="controller-manager" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.044075 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.045901 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046283 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046400 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-client-ca\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046481 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-proxy-ca-bundles\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046518 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h8qd\" (UniqueName: \"kubernetes.io/projected/1ecbbc62-ee30-43b2-898e-5d69693c8659-kube-api-access-2h8qd\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046538 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-config\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046560 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecbbc62-ee30-43b2-898e-5d69693c8659-serving-cert\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046753 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.046829 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.047216 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.047417 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk"] Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.047632 4591 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.048059 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.050283 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.050289 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.051457 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk"] Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.059460 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.059759 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.059961 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.061049 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.061480 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.094648 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75bb457f55-mv2lg"] Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148414 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-proxy-ca-bundles\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148481 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-client-ca\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148526 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h8qd\" (UniqueName: \"kubernetes.io/projected/1ecbbc62-ee30-43b2-898e-5d69693c8659-kube-api-access-2h8qd\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148551 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-config\") pod 
\"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148593 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecbbc62-ee30-43b2-898e-5d69693c8659-serving-cert\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148646 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-client-ca\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148719 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72a67835-5ae4-4017-83bd-6842d365eb61-serving-cert\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148756 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-config\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.148781 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-269lr\" (UniqueName: \"kubernetes.io/projected/72a67835-5ae4-4017-83bd-6842d365eb61-kube-api-access-269lr\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.150184 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-client-ca\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.150237 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-config\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.151343 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-proxy-ca-bundles\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " 
pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.155114 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecbbc62-ee30-43b2-898e-5d69693c8659-serving-cert\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.163797 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h8qd\" (UniqueName: \"kubernetes.io/projected/1ecbbc62-ee30-43b2-898e-5d69693c8659-kube-api-access-2h8qd\") pod \"controller-manager-75bb457f55-mv2lg\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.249748 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72a67835-5ae4-4017-83bd-6842d365eb61-serving-cert\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.249802 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-config\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.249826 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-269lr\" (UniqueName: \"kubernetes.io/projected/72a67835-5ae4-4017-83bd-6842d365eb61-kube-api-access-269lr\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.249875 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-client-ca\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.250744 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-client-ca\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.250972 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-config\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.253255 4591 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72a67835-5ae4-4017-83bd-6842d365eb61-serving-cert\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.263482 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-269lr\" (UniqueName: \"kubernetes.io/projected/72a67835-5ae4-4017-83bd-6842d365eb61-kube-api-access-269lr\") pod \"route-controller-manager-d97b4688c-gmcdk\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.361995 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.384688 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.546614 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75bb457f55-mv2lg"] Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.596257 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk"] Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.724618 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" event={"ID":"72a67835-5ae4-4017-83bd-6842d365eb61","Type":"ContainerStarted","Data":"99e22ed3309cde97e10023e320b2319662599cf01c510627726a05146acc5116"} Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.729246 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" event={"ID":"1ecbbc62-ee30-43b2-898e-5d69693c8659","Type":"ContainerStarted","Data":"e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470"} Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.729284 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" event={"ID":"1ecbbc62-ee30-43b2-898e-5d69693c8659","Type":"ContainerStarted","Data":"97a33d551837d3fe2772aa78e78a04942e6225f0d4833f8e08a1c4120ba5a7ba"} Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.730412 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.731563 4591 patch_prober.go:28] interesting pod/controller-manager-75bb457f55-mv2lg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" start-of-body= Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.731606 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" podUID="1ecbbc62-ee30-43b2-898e-5d69693c8659" containerName="controller-manager" probeResult="failure" output="Get 
\"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" Dec 03 12:10:33 crc kubenswrapper[4591]: I1203 12:10:33.743221 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" podStartSLOduration=2.743197118 podStartE2EDuration="2.743197118s" podCreationTimestamp="2025-12-03 12:10:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:10:33.74141551 +0000 UTC m=+331.168455280" watchObservedRunningTime="2025-12-03 12:10:33.743197118 +0000 UTC m=+331.170236889" Dec 03 12:10:34 crc kubenswrapper[4591]: I1203 12:10:34.736854 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" event={"ID":"72a67835-5ae4-4017-83bd-6842d365eb61","Type":"ContainerStarted","Data":"037e997e5b374ebf4ed9aed5d9df23806458678e0194e366b458cfd66a65c2b1"} Dec 03 12:10:34 crc kubenswrapper[4591]: I1203 12:10:34.741951 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:34 crc kubenswrapper[4591]: I1203 12:10:34.755954 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" podStartSLOduration=3.755934435 podStartE2EDuration="3.755934435s" podCreationTimestamp="2025-12-03 12:10:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:10:34.753255914 +0000 UTC m=+332.180295694" watchObservedRunningTime="2025-12-03 12:10:34.755934435 +0000 UTC m=+332.182974204" Dec 03 12:10:35 crc kubenswrapper[4591]: I1203 12:10:35.741662 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:35 crc kubenswrapper[4591]: I1203 12:10:35.746461 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:10:44 crc kubenswrapper[4591]: I1203 12:10:44.818568 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:10:44 crc kubenswrapper[4591]: E1203 12:10:44.819022 4591 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:44 crc kubenswrapper[4591]: E1203 12:10:44.819391 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates podName:f2f2077f-a0e2-407a-837b-6a90ae5f7ebe nodeName:}" failed. No retries permitted until 2025-12-03 12:11:16.819360587 +0000 UTC m=+374.246400357 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-6lr5t" (UID: "f2f2077f-a0e2-407a-837b-6a90ae5f7ebe") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:10:55 crc kubenswrapper[4591]: I1203 12:10:55.300027 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:10:55 crc kubenswrapper[4591]: I1203 12:10:55.300725 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:10:55 crc kubenswrapper[4591]: I1203 12:10:55.730746 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-75bb457f55-mv2lg"] Dec 03 12:10:55 crc kubenswrapper[4591]: I1203 12:10:55.731117 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" podUID="1ecbbc62-ee30-43b2-898e-5d69693c8659" containerName="controller-manager" containerID="cri-o://e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470" gracePeriod=30 Dec 03 12:10:55 crc kubenswrapper[4591]: E1203 12:10:55.843260 4591 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ecbbc62_ee30_43b2_898e_5d69693c8659.slice/crio-conmon-e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470.scope\": RecentStats: unable to find data in memory cache]" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.228191 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.360271 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecbbc62-ee30-43b2-898e-5d69693c8659-serving-cert\") pod \"1ecbbc62-ee30-43b2-898e-5d69693c8659\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.360330 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h8qd\" (UniqueName: \"kubernetes.io/projected/1ecbbc62-ee30-43b2-898e-5d69693c8659-kube-api-access-2h8qd\") pod \"1ecbbc62-ee30-43b2-898e-5d69693c8659\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.360369 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-proxy-ca-bundles\") pod \"1ecbbc62-ee30-43b2-898e-5d69693c8659\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.360391 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-client-ca\") pod \"1ecbbc62-ee30-43b2-898e-5d69693c8659\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.360455 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-config\") pod \"1ecbbc62-ee30-43b2-898e-5d69693c8659\" (UID: \"1ecbbc62-ee30-43b2-898e-5d69693c8659\") " Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.361341 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "1ecbbc62-ee30-43b2-898e-5d69693c8659" (UID: "1ecbbc62-ee30-43b2-898e-5d69693c8659"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.361368 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-client-ca" (OuterVolumeSpecName: "client-ca") pod "1ecbbc62-ee30-43b2-898e-5d69693c8659" (UID: "1ecbbc62-ee30-43b2-898e-5d69693c8659"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.361469 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-config" (OuterVolumeSpecName: "config") pod "1ecbbc62-ee30-43b2-898e-5d69693c8659" (UID: "1ecbbc62-ee30-43b2-898e-5d69693c8659"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.366383 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ecbbc62-ee30-43b2-898e-5d69693c8659-kube-api-access-2h8qd" (OuterVolumeSpecName: "kube-api-access-2h8qd") pod "1ecbbc62-ee30-43b2-898e-5d69693c8659" (UID: "1ecbbc62-ee30-43b2-898e-5d69693c8659"). InnerVolumeSpecName "kube-api-access-2h8qd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.367037 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ecbbc62-ee30-43b2-898e-5d69693c8659-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1ecbbc62-ee30-43b2-898e-5d69693c8659" (UID: "1ecbbc62-ee30-43b2-898e-5d69693c8659"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.461781 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecbbc62-ee30-43b2-898e-5d69693c8659-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.461817 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h8qd\" (UniqueName: \"kubernetes.io/projected/1ecbbc62-ee30-43b2-898e-5d69693c8659-kube-api-access-2h8qd\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.461828 4591 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.461838 4591 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.461847 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecbbc62-ee30-43b2-898e-5d69693c8659-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.860568 4591 generic.go:334] "Generic (PLEG): container finished" podID="1ecbbc62-ee30-43b2-898e-5d69693c8659" containerID="e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470" exitCode=0 Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.860663 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.860641 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" event={"ID":"1ecbbc62-ee30-43b2-898e-5d69693c8659","Type":"ContainerDied","Data":"e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470"} Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.861145 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75bb457f55-mv2lg" event={"ID":"1ecbbc62-ee30-43b2-898e-5d69693c8659","Type":"ContainerDied","Data":"97a33d551837d3fe2772aa78e78a04942e6225f0d4833f8e08a1c4120ba5a7ba"} Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.861181 4591 scope.go:117] "RemoveContainer" containerID="e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.882022 4591 scope.go:117] "RemoveContainer" containerID="e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470" Dec 03 12:10:56 crc kubenswrapper[4591]: E1203 12:10:56.883162 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470\": container with ID starting with e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470 not found: ID does not exist" containerID="e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.883198 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470"} err="failed to get container status \"e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470\": rpc error: code = NotFound desc = could not find container \"e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470\": container with ID starting with e7338fbf32a909e753ab6bd602741b11b1fdfce2ad90f3808a1dcf53ff54d470 not found: ID does not exist" Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.897242 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-75bb457f55-mv2lg"] Dec 03 12:10:56 crc kubenswrapper[4591]: I1203 12:10:56.897278 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-75bb457f55-mv2lg"] Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.060042 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6655c6f95-8hdnz"] Dec 03 12:10:57 crc kubenswrapper[4591]: E1203 12:10:57.060395 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ecbbc62-ee30-43b2-898e-5d69693c8659" containerName="controller-manager" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.060421 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ecbbc62-ee30-43b2-898e-5d69693c8659" containerName="controller-manager" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.060563 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ecbbc62-ee30-43b2-898e-5d69693c8659" containerName="controller-manager" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.061204 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.063230 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.064209 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.067080 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.067892 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.067930 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.068335 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.072656 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.073575 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6655c6f95-8hdnz"] Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.175485 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-config\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.175620 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-client-ca\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.175670 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fddd312-0075-44eb-ba14-547649a5a632-serving-cert\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.175894 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-proxy-ca-bundles\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.176031 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl892\" (UniqueName: 
\"kubernetes.io/projected/9fddd312-0075-44eb-ba14-547649a5a632-kube-api-access-zl892\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.277867 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-config\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.278023 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-client-ca\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.278049 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fddd312-0075-44eb-ba14-547649a5a632-serving-cert\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.278108 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-proxy-ca-bundles\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.278146 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl892\" (UniqueName: \"kubernetes.io/projected/9fddd312-0075-44eb-ba14-547649a5a632-kube-api-access-zl892\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.279353 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-config\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.279817 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-client-ca\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.279931 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9fddd312-0075-44eb-ba14-547649a5a632-proxy-ca-bundles\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 
03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.286453 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fddd312-0075-44eb-ba14-547649a5a632-serving-cert\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.296759 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl892\" (UniqueName: \"kubernetes.io/projected/9fddd312-0075-44eb-ba14-547649a5a632-kube-api-access-zl892\") pod \"controller-manager-6655c6f95-8hdnz\" (UID: \"9fddd312-0075-44eb-ba14-547649a5a632\") " pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.377792 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.569332 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6655c6f95-8hdnz"] Dec 03 12:10:57 crc kubenswrapper[4591]: W1203 12:10:57.576656 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fddd312_0075_44eb_ba14_547649a5a632.slice/crio-66351ffe840fa1ebe70e32a0665d0c8f4acae6ebfff6b66478351fbe21d82d23 WatchSource:0}: Error finding container 66351ffe840fa1ebe70e32a0665d0c8f4acae6ebfff6b66478351fbe21d82d23: Status 404 returned error can't find the container with id 66351ffe840fa1ebe70e32a0665d0c8f4acae6ebfff6b66478351fbe21d82d23 Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.866649 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" event={"ID":"9fddd312-0075-44eb-ba14-547649a5a632","Type":"ContainerStarted","Data":"306d2d9ef89633c173cc29ec94cbd3628a49599def944ee55fc4e1f9f2594927"} Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.866716 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" event={"ID":"9fddd312-0075-44eb-ba14-547649a5a632","Type":"ContainerStarted","Data":"66351ffe840fa1ebe70e32a0665d0c8f4acae6ebfff6b66478351fbe21d82d23"} Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.866735 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.886625 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" podStartSLOduration=2.886610214 podStartE2EDuration="2.886610214s" podCreationTimestamp="2025-12-03 12:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:10:57.883727569 +0000 UTC m=+355.310767340" watchObservedRunningTime="2025-12-03 12:10:57.886610214 +0000 UTC m=+355.313649985" Dec 03 12:10:57 crc kubenswrapper[4591]: I1203 12:10:57.908915 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6655c6f95-8hdnz" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.491573 4591 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8npdh"] Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.492541 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.504087 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8npdh"] Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.598424 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-trusted-ca\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.598483 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l7nn\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-kube-api-access-7l7nn\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.598514 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.598748 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-registry-certificates\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.598795 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.598849 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-bound-sa-token\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.598920 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-registry-tls\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: 
I1203 12:10:58.599005 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.619493 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.700161 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-trusted-ca\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.700222 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l7nn\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-kube-api-access-7l7nn\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.700250 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.700295 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-registry-certificates\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.700313 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.700338 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-bound-sa-token\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.700373 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-registry-tls\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.701714 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.702850 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-registry-certificates\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.702868 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-trusted-ca\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.708099 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-registry-tls\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.708161 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.716639 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-bound-sa-token\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.717561 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l7nn\" (UniqueName: \"kubernetes.io/projected/2c98f726-898c-4c5a-ae13-0e7cfd146cb3-kube-api-access-7l7nn\") pod \"image-registry-66df7c8f76-8npdh\" (UID: \"2c98f726-898c-4c5a-ae13-0e7cfd146cb3\") " pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.806409 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:58 crc kubenswrapper[4591]: I1203 12:10:58.907242 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ecbbc62-ee30-43b2-898e-5d69693c8659" path="/var/lib/kubelet/pods/1ecbbc62-ee30-43b2-898e-5d69693c8659/volumes" Dec 03 12:10:59 crc kubenswrapper[4591]: I1203 12:10:59.222164 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8npdh"] Dec 03 12:10:59 crc kubenswrapper[4591]: I1203 12:10:59.881980 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" event={"ID":"2c98f726-898c-4c5a-ae13-0e7cfd146cb3","Type":"ContainerStarted","Data":"6ec7fd0f9e9fc2a56d28ec33fd247e4a3429c8f9426cb74185b2da5b78832c0c"} Dec 03 12:10:59 crc kubenswrapper[4591]: I1203 12:10:59.882469 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:10:59 crc kubenswrapper[4591]: I1203 12:10:59.882489 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" event={"ID":"2c98f726-898c-4c5a-ae13-0e7cfd146cb3","Type":"ContainerStarted","Data":"0fdf7434e4788fac08c24d771faa1a88a4b476390ce53ee2e76cf8ac4af5dd8e"} Dec 03 12:10:59 crc kubenswrapper[4591]: I1203 12:10:59.899097 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" podStartSLOduration=1.899058314 podStartE2EDuration="1.899058314s" podCreationTimestamp="2025-12-03 12:10:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:10:59.896590231 +0000 UTC m=+357.323629990" watchObservedRunningTime="2025-12-03 12:10:59.899058314 +0000 UTC m=+357.326098084" Dec 03 12:11:16 crc kubenswrapper[4591]: I1203 12:11:16.870621 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:11:16 crc kubenswrapper[4591]: I1203 12:11:16.876895 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/f2f2077f-a0e2-407a-837b-6a90ae5f7ebe-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-6lr5t\" (UID: \"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:11:17 crc kubenswrapper[4591]: I1203 12:11:17.140532 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:11:17 crc kubenswrapper[4591]: I1203 12:11:17.511340 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t"] Dec 03 12:11:17 crc kubenswrapper[4591]: I1203 12:11:17.985381 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" event={"ID":"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe","Type":"ContainerStarted","Data":"940edda5eed03fd9c203a9ec60ea86ee1ba35bd52a3e2fddce2944a030dbf06c"} Dec 03 12:11:18 crc kubenswrapper[4591]: I1203 12:11:18.813046 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-8npdh" Dec 03 12:11:18 crc kubenswrapper[4591]: I1203 12:11:18.872618 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7qzlf"] Dec 03 12:11:18 crc kubenswrapper[4591]: I1203 12:11:18.992561 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" event={"ID":"f2f2077f-a0e2-407a-837b-6a90ae5f7ebe","Type":"ContainerStarted","Data":"905714313d99bf97bad4b46bdbb24a64e5104f8ab80e4aa2f47a2b7eeb3a6a22"} Dec 03 12:11:18 crc kubenswrapper[4591]: I1203 12:11:18.992866 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:11:18 crc kubenswrapper[4591]: I1203 12:11:18.999532 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.007015 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-6lr5t" podStartSLOduration=65.697679189 podStartE2EDuration="1m7.006996877s" podCreationTimestamp="2025-12-03 12:10:12 +0000 UTC" firstStartedPulling="2025-12-03 12:11:17.519166812 +0000 UTC m=+374.946206581" lastFinishedPulling="2025-12-03 12:11:18.8284845 +0000 UTC m=+376.255524269" observedRunningTime="2025-12-03 12:11:19.002623881 +0000 UTC m=+376.429663651" watchObservedRunningTime="2025-12-03 12:11:19.006996877 +0000 UTC m=+376.434036647" Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.975350 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-j6gmj"] Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.976515 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.978223 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-dockercfg-nv5ww" Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.978262 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-tls" Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.978328 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-kube-rbac-proxy-config" Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.978593 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-client-ca" Dec 03 12:11:19 crc kubenswrapper[4591]: I1203 12:11:19.986558 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-j6gmj"] Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.122886 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a647d99-405b-4e0f-ac06-9f854150e2f1-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.123044 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5nhx\" (UniqueName: \"kubernetes.io/projected/3a647d99-405b-4e0f-ac06-9f854150e2f1-kube-api-access-v5nhx\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.123244 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/3a647d99-405b-4e0f-ac06-9f854150e2f1-metrics-client-ca\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.123343 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/3a647d99-405b-4e0f-ac06-9f854150e2f1-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.225325 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/3a647d99-405b-4e0f-ac06-9f854150e2f1-metrics-client-ca\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.225388 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: 
\"kubernetes.io/secret/3a647d99-405b-4e0f-ac06-9f854150e2f1-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.225433 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a647d99-405b-4e0f-ac06-9f854150e2f1-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.225485 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5nhx\" (UniqueName: \"kubernetes.io/projected/3a647d99-405b-4e0f-ac06-9f854150e2f1-kube-api-access-v5nhx\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.226645 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/3a647d99-405b-4e0f-ac06-9f854150e2f1-metrics-client-ca\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.231448 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/3a647d99-405b-4e0f-ac06-9f854150e2f1-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.232012 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a647d99-405b-4e0f-ac06-9f854150e2f1-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.240410 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5nhx\" (UniqueName: \"kubernetes.io/projected/3a647d99-405b-4e0f-ac06-9f854150e2f1-kube-api-access-v5nhx\") pod \"prometheus-operator-db54df47d-j6gmj\" (UID: \"3a647d99-405b-4e0f-ac06-9f854150e2f1\") " pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.291238 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" Dec 03 12:11:20 crc kubenswrapper[4591]: I1203 12:11:20.660973 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-j6gmj"] Dec 03 12:11:21 crc kubenswrapper[4591]: I1203 12:11:21.003397 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" event={"ID":"3a647d99-405b-4e0f-ac06-9f854150e2f1","Type":"ContainerStarted","Data":"d55abbf063fd4845fea0ae6ae6ac062acfffd56ea2b032cc238d71c943e7a5a6"} Dec 03 12:11:23 crc kubenswrapper[4591]: I1203 12:11:23.015320 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" event={"ID":"3a647d99-405b-4e0f-ac06-9f854150e2f1","Type":"ContainerStarted","Data":"c497e1b2f8852bf85d881a527a2047333ef5bcfb76e5fb277234dbeda6601d44"} Dec 03 12:11:23 crc kubenswrapper[4591]: I1203 12:11:23.015641 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" event={"ID":"3a647d99-405b-4e0f-ac06-9f854150e2f1","Type":"ContainerStarted","Data":"de9d5500a8e2686255dcb9818051f26186a5e7378047ea4138d72cff5a00e33d"} Dec 03 12:11:23 crc kubenswrapper[4591]: I1203 12:11:23.029487 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-db54df47d-j6gmj" podStartSLOduration=2.522233428 podStartE2EDuration="4.02946618s" podCreationTimestamp="2025-12-03 12:11:19 +0000 UTC" firstStartedPulling="2025-12-03 12:11:20.670714361 +0000 UTC m=+378.097754131" lastFinishedPulling="2025-12-03 12:11:22.177947112 +0000 UTC m=+379.604986883" observedRunningTime="2025-12-03 12:11:23.027874218 +0000 UTC m=+380.454914008" watchObservedRunningTime="2025-12-03 12:11:23.02946618 +0000 UTC m=+380.456505950" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.299633 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.299935 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.299983 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.300589 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b48d026487ee4ed01e0e2bfbf7bb4dc4d646df3eb7c48ee2b7d086035594359"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.300639 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" 
containerName="machine-config-daemon" containerID="cri-o://5b48d026487ee4ed01e0e2bfbf7bb4dc4d646df3eb7c48ee2b7d086035594359" gracePeriod=600 Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.303186 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx"] Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.304174 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: W1203 12:11:25.306472 4591 reflector.go:561] object-"openshift-monitoring"/"openshift-state-metrics-tls": failed to list *v1.Secret: secrets "openshift-state-metrics-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-monitoring": no relationship found between node 'crc' and this object Dec 03 12:11:25 crc kubenswrapper[4591]: E1203 12:11:25.306510 4591 reflector.go:158] "Unhandled Error" err="object-\"openshift-monitoring\"/\"openshift-state-metrics-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-state-metrics-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-monitoring\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.306707 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-dockercfg-jsxb5" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.311710 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd"] Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.312682 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.314653 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.314768 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.314851 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f455\" (UniqueName: \"kubernetes.io/projected/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-kube-api-access-9f455\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.315004 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.319909 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-kube-rbac-proxy-config" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.319941 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-kube-rbac-proxy-config" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.320130 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-tls" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.320256 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-state-metrics-custom-resource-state-configmap" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.322259 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-dockercfg-mbgqm" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.337534 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd"] Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.346631 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/node-exporter-jrh9g"] Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.347753 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.348968 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-dockercfg-gk9db" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.349285 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-kube-rbac-proxy-config" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.349456 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-tls" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.361545 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx"] Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416640 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-wtmp\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416682 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416837 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/67d783d1-f7d1-48e4-b989-8959dcca86ad-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416872 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/8c3ee345-cde3-4113-9f8f-daa01780d392-metrics-client-ca\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416899 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416924 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416948 4591 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416971 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f455\" (UniqueName: \"kubernetes.io/projected/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-kube-api-access-9f455\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.416993 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mshpd\" (UniqueName: \"kubernetes.io/projected/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-api-access-mshpd\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417012 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-sys\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417038 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417073 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417096 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-textfile\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417113 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-root\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417127 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/67d783d1-f7d1-48e4-b989-8959dcca86ad-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417190 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417205 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-tls\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.417224 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxs5s\" (UniqueName: \"kubernetes.io/projected/8c3ee345-cde3-4113-9f8f-daa01780d392-kube-api-access-dxs5s\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.418409 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.428708 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.435663 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9f455\" (UniqueName: \"kubernetes.io/projected/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-kube-api-access-9f455\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518220 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mshpd\" (UniqueName: \"kubernetes.io/projected/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-api-access-mshpd\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518273 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-sys\") pod 
\"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518308 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518333 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518367 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-textfile\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518400 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-root\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518416 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/67d783d1-f7d1-48e4-b989-8959dcca86ad-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518485 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-tls\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518511 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxs5s\" (UniqueName: \"kubernetes.io/projected/8c3ee345-cde3-4113-9f8f-daa01780d392-kube-api-access-dxs5s\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518536 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-wtmp\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518555 4591 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518596 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/67d783d1-f7d1-48e4-b989-8959dcca86ad-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518631 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/8c3ee345-cde3-4113-9f8f-daa01780d392-metrics-client-ca\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.518665 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: E1203 12:11:25.519473 4591 secret.go:188] Couldn't get secret openshift-monitoring/kube-state-metrics-tls: secret "kube-state-metrics-tls" not found Dec 03 12:11:25 crc kubenswrapper[4591]: E1203 12:11:25.519571 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-tls podName:67d783d1-f7d1-48e4-b989-8959dcca86ad nodeName:}" failed. No retries permitted until 2025-12-03 12:11:26.019545206 +0000 UTC m=+383.446584977 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls" (UniqueName: "kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-tls") pod "kube-state-metrics-777cb5bd5d-f62dd" (UID: "67d783d1-f7d1-48e4-b989-8959dcca86ad") : secret "kube-state-metrics-tls" not found Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.519699 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"root\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-root\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.519858 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-sys\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.520290 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-textfile\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.520430 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-wtmp\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.520576 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/8c3ee345-cde3-4113-9f8f-daa01780d392-metrics-client-ca\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.520793 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/67d783d1-f7d1-48e4-b989-8959dcca86ad-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.522081 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/67d783d1-f7d1-48e4-b989-8959dcca86ad-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.522470 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.523430 4591 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.523813 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/8c3ee345-cde3-4113-9f8f-daa01780d392-node-exporter-tls\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.534620 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.536158 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxs5s\" (UniqueName: \"kubernetes.io/projected/8c3ee345-cde3-4113-9f8f-daa01780d392-kube-api-access-dxs5s\") pod \"node-exporter-jrh9g\" (UID: \"8c3ee345-cde3-4113-9f8f-daa01780d392\") " pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.539747 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mshpd\" (UniqueName: \"kubernetes.io/projected/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-api-access-mshpd\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:25 crc kubenswrapper[4591]: I1203 12:11:25.657957 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/node-exporter-jrh9g" Dec 03 12:11:25 crc kubenswrapper[4591]: W1203 12:11:25.679103 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c3ee345_cde3_4113_9f8f_daa01780d392.slice/crio-e7e0cb6765e1057a5722a063a24ffb21583ebd0050fc7178f8a631be5afff6e9 WatchSource:0}: Error finding container e7e0cb6765e1057a5722a063a24ffb21583ebd0050fc7178f8a631be5afff6e9: Status 404 returned error can't find the container with id e7e0cb6765e1057a5722a063a24ffb21583ebd0050fc7178f8a631be5afff6e9 Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.025845 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.034214 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/67d783d1-f7d1-48e4-b989-8959dcca86ad-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-f62dd\" (UID: \"67d783d1-f7d1-48e4-b989-8959dcca86ad\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.036042 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-jrh9g" event={"ID":"8c3ee345-cde3-4113-9f8f-daa01780d392","Type":"ContainerStarted","Data":"e7e0cb6765e1057a5722a063a24ffb21583ebd0050fc7178f8a631be5afff6e9"} Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.039048 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="5b48d026487ee4ed01e0e2bfbf7bb4dc4d646df3eb7c48ee2b7d086035594359" exitCode=0 Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.039092 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"5b48d026487ee4ed01e0e2bfbf7bb4dc4d646df3eb7c48ee2b7d086035594359"} Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.039159 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"f1e201b35adc4b2f415dc45c61260eb3c7549edaa83833a201b4a8f697036247"} Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.039199 4591 scope.go:117] "RemoveContainer" containerID="149401abb6951e6e00d6acdc5cecd9a62bc01d1d491c651bfcad2c2988f5e5cf" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.232473 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.374225 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.377047 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.387736 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-web" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.387937 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-metric" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.388134 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-generated" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.388322 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls-assets-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.388370 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-web-config" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.388337 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.388770 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-dockercfg-pll6m" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.388945 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.395206 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.395215 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-tls" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.396723 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"alertmanager-trusted-ca-bundle" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.403674 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-jk8xx\" (UID: \"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433182 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-config-volume\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433231 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8d4d2d45-debc-475d-a959-e222b806f7d6-config-out\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433259 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: 
\"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433282 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8d4d2d45-debc-475d-a959-e222b806f7d6-tls-assets\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433311 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-web-config\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433336 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jx65\" (UniqueName: \"kubernetes.io/projected/8d4d2d45-debc-475d-a959-e222b806f7d6-kube-api-access-9jx65\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433363 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433500 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433531 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433581 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8d4d2d45-debc-475d-a959-e222b806f7d6-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433607 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/8d4d2d45-debc-475d-a959-e222b806f7d6-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " 
pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.433626 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/8d4d2d45-debc-475d-a959-e222b806f7d6-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.519545 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535522 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-config-volume\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535581 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8d4d2d45-debc-475d-a959-e222b806f7d6-config-out\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535619 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535648 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8d4d2d45-debc-475d-a959-e222b806f7d6-tls-assets\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535689 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-web-config\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535721 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jx65\" (UniqueName: \"kubernetes.io/projected/8d4d2d45-debc-475d-a959-e222b806f7d6-kube-api-access-9jx65\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535751 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535813 4591 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535844 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535901 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8d4d2d45-debc-475d-a959-e222b806f7d6-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535938 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/8d4d2d45-debc-475d-a959-e222b806f7d6-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.535959 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/8d4d2d45-debc-475d-a959-e222b806f7d6-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.536608 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/8d4d2d45-debc-475d-a959-e222b806f7d6-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.540667 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/8d4d2d45-debc-475d-a959-e222b806f7d6-config-out\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.540811 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/8d4d2d45-debc-475d-a959-e222b806f7d6-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.540899 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-config-volume\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.541232 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8d4d2d45-debc-475d-a959-e222b806f7d6-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.541266 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-web-config\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.541571 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/8d4d2d45-debc-475d-a959-e222b806f7d6-tls-assets\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.542171 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.542219 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.543288 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.551303 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jx65\" (UniqueName: \"kubernetes.io/projected/8d4d2d45-debc-475d-a959-e222b806f7d6-kube-api-access-9jx65\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.554305 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/8d4d2d45-debc-475d-a959-e222b806f7d6-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"8d4d2d45-debc-475d-a959-e222b806f7d6\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.681979 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd"] Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.701514 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:11:26 crc kubenswrapper[4591]: I1203 12:11:26.906010 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx"] Dec 03 12:11:26 crc kubenswrapper[4591]: W1203 12:11:26.922326 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d815d46_0ef0_4ad7_9dec_7a6dbeea91ff.slice/crio-80418fd0424485280aec94d46c0599dc7c55fca87613ef4c4156a9df80ee4718 WatchSource:0}: Error finding container 80418fd0424485280aec94d46c0599dc7c55fca87613ef4c4156a9df80ee4718: Status 404 returned error can't find the container with id 80418fd0424485280aec94d46c0599dc7c55fca87613ef4c4156a9df80ee4718 Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.047348 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" event={"ID":"67d783d1-f7d1-48e4-b989-8959dcca86ad","Type":"ContainerStarted","Data":"56f6883c7a6dc769d0833be17cbe6e97607cc1f6ae037f83f4d8f19657d28eb5"} Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.050172 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-jrh9g" event={"ID":"8c3ee345-cde3-4113-9f8f-daa01780d392","Type":"ContainerStarted","Data":"4195cf6d01dc770b121a5f83fb73371533bb2e016454d1cbba0cb097af6eb0ec"} Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.055862 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" event={"ID":"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff","Type":"ContainerStarted","Data":"0e9041803f1dc6d5e62a434b77bc57590b834aad1fd23bf46ab725c3764d09ac"} Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.055928 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" event={"ID":"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff","Type":"ContainerStarted","Data":"80418fd0424485280aec94d46c0599dc7c55fca87613ef4c4156a9df80ee4718"} Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.184869 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 03 12:11:27 crc kubenswrapper[4591]: W1203 12:11:27.187922 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d4d2d45_debc_475d_a959_e222b806f7d6.slice/crio-fa8f4b33376b43ef89ded0ffa49c9dc7791dc3b61cea54a1125cd53398874cec WatchSource:0}: Error finding container fa8f4b33376b43ef89ded0ffa49c9dc7791dc3b61cea54a1125cd53398874cec: Status 404 returned error can't find the container with id fa8f4b33376b43ef89ded0ffa49c9dc7791dc3b61cea54a1125cd53398874cec Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.283497 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4"] Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.285027 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.286854 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.287010 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-metrics" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.288574 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-tls" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.288602 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-grpc-tls-a7qfjgv4ksu5m" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.288618 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-rules" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.288610 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-dockercfg-9wbfr" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.289402 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-web" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.300697 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4"] Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.350534 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.350606 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.350655 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-tls\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.351021 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.351197 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-metrics-client-ca\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.351246 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-grpc-tls\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.351517 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.351580 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5g8f\" (UniqueName: \"kubernetes.io/projected/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-kube-api-access-r5g8f\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.453408 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-metrics-client-ca\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.454001 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-grpc-tls\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.454202 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.454242 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5g8f\" (UniqueName: \"kubernetes.io/projected/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-kube-api-access-r5g8f\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.455080 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" 
(UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.455106 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.455132 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-tls\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.455176 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.455980 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-metrics-client-ca\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.460519 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-grpc-tls\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.460642 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.460977 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.461293 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: 
\"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.461953 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-tls\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.463435 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.468731 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5g8f\" (UniqueName: \"kubernetes.io/projected/9b15c888-a5fa-4ae4-beaa-e15261e0faa6-kube-api-access-r5g8f\") pod \"thanos-querier-6f9dd6cd7d-m7kb4\" (UID: \"9b15c888-a5fa-4ae4-beaa-e15261e0faa6\") " pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:27 crc kubenswrapper[4591]: I1203 12:11:27.599553 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:28 crc kubenswrapper[4591]: I1203 12:11:28.008348 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4"] Dec 03 12:11:28 crc kubenswrapper[4591]: I1203 12:11:28.087618 4591 generic.go:334] "Generic (PLEG): container finished" podID="8c3ee345-cde3-4113-9f8f-daa01780d392" containerID="4195cf6d01dc770b121a5f83fb73371533bb2e016454d1cbba0cb097af6eb0ec" exitCode=0 Dec 03 12:11:28 crc kubenswrapper[4591]: I1203 12:11:28.087708 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-jrh9g" event={"ID":"8c3ee345-cde3-4113-9f8f-daa01780d392","Type":"ContainerDied","Data":"4195cf6d01dc770b121a5f83fb73371533bb2e016454d1cbba0cb097af6eb0ec"} Dec 03 12:11:28 crc kubenswrapper[4591]: I1203 12:11:28.095549 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" event={"ID":"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff","Type":"ContainerStarted","Data":"3fd5f153ef49ea15fdeb93c0271ab737d8b3332aa281df5e30ed2c8b054c1427"} Dec 03 12:11:28 crc kubenswrapper[4591]: I1203 12:11:28.103674 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerStarted","Data":"fa8f4b33376b43ef89ded0ffa49c9dc7791dc3b61cea54a1125cd53398874cec"} Dec 03 12:11:28 crc kubenswrapper[4591]: W1203 12:11:28.365323 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b15c888_a5fa_4ae4_beaa_e15261e0faa6.slice/crio-562a3f6927f6cc80c55460a0afa4907170745f29dbcc06ee6045683419c6d66c WatchSource:0}: Error finding container 562a3f6927f6cc80c55460a0afa4907170745f29dbcc06ee6045683419c6d66c: 
Status 404 returned error can't find the container with id 562a3f6927f6cc80c55460a0afa4907170745f29dbcc06ee6045683419c6d66c Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.111366 4591 generic.go:334] "Generic (PLEG): container finished" podID="8d4d2d45-debc-475d-a959-e222b806f7d6" containerID="7b9b742f94033e73e39f0da92cde6c25e6f11156670043b0c5afcc7735b1eb5e" exitCode=0 Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.111537 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerDied","Data":"7b9b742f94033e73e39f0da92cde6c25e6f11156670043b0c5afcc7735b1eb5e"} Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.118131 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" event={"ID":"67d783d1-f7d1-48e4-b989-8959dcca86ad","Type":"ContainerStarted","Data":"6033cf54cd7159721d767f7a917aa9777b621a2482692483240a8ebc0b6a9908"} Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.118175 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" event={"ID":"67d783d1-f7d1-48e4-b989-8959dcca86ad","Type":"ContainerStarted","Data":"09b2ed8378f98e09f595979d641fe2dcf4ebfec03018663cac9d9e63cd1a8a38"} Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.120409 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-jrh9g" event={"ID":"8c3ee345-cde3-4113-9f8f-daa01780d392","Type":"ContainerStarted","Data":"da6536397b8ce2a77143345d912b4a6167af214e00612f0ec06a2fe787b00eb2"} Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.120455 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-jrh9g" event={"ID":"8c3ee345-cde3-4113-9f8f-daa01780d392","Type":"ContainerStarted","Data":"4a591691423e8fa7166dcbf55516b57a35b114b4d2c05502c56481ef49aee54b"} Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.122189 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" event={"ID":"9b15c888-a5fa-4ae4-beaa-e15261e0faa6","Type":"ContainerStarted","Data":"562a3f6927f6cc80c55460a0afa4907170745f29dbcc06ee6045683419c6d66c"} Dec 03 12:11:29 crc kubenswrapper[4591]: I1203 12:11:29.152594 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/node-exporter-jrh9g" podStartSLOduration=3.005211201 podStartE2EDuration="4.152566728s" podCreationTimestamp="2025-12-03 12:11:25 +0000 UTC" firstStartedPulling="2025-12-03 12:11:25.683087757 +0000 UTC m=+383.110127527" lastFinishedPulling="2025-12-03 12:11:26.830443294 +0000 UTC m=+384.257483054" observedRunningTime="2025-12-03 12:11:29.147765386 +0000 UTC m=+386.574805156" watchObservedRunningTime="2025-12-03 12:11:29.152566728 +0000 UTC m=+386.579606499" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.108213 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-798d6cf7d4-x7bph"] Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.110324 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.144471 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-798d6cf7d4-x7bph"] Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.154027 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" event={"ID":"2d815d46-0ef0-4ad7-9dec-7a6dbeea91ff","Type":"ContainerStarted","Data":"5a5d6f65da6427406c3479fbff4feace590d86caec840ec9e0f292cad28839dd"} Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.159923 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" event={"ID":"67d783d1-f7d1-48e4-b989-8959dcca86ad","Type":"ContainerStarted","Data":"415a817f0f9beac6a6fbd1c29bed23050045d4d23804f04c87cbe5c9f8c34cd4"} Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.189761 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-f62dd" podStartSLOduration=3.369946664 podStartE2EDuration="5.189739469s" podCreationTimestamp="2025-12-03 12:11:25 +0000 UTC" firstStartedPulling="2025-12-03 12:11:26.809332219 +0000 UTC m=+384.236371988" lastFinishedPulling="2025-12-03 12:11:28.629125023 +0000 UTC m=+386.056164793" observedRunningTime="2025-12-03 12:11:30.186750322 +0000 UTC m=+387.613790093" watchObservedRunningTime="2025-12-03 12:11:30.189739469 +0000 UTC m=+387.616779239" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.196268 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/openshift-state-metrics-566fddb674-jk8xx" podStartSLOduration=3.241581973 podStartE2EDuration="5.196244094s" podCreationTimestamp="2025-12-03 12:11:25 +0000 UTC" firstStartedPulling="2025-12-03 12:11:27.186982353 +0000 UTC m=+384.614022123" lastFinishedPulling="2025-12-03 12:11:29.141644474 +0000 UTC m=+386.568684244" observedRunningTime="2025-12-03 12:11:30.171119169 +0000 UTC m=+387.598158940" watchObservedRunningTime="2025-12-03 12:11:30.196244094 +0000 UTC m=+387.623283864" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.205297 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwmwj\" (UniqueName: \"kubernetes.io/projected/6676943b-3f20-4874-9c99-e5da4e28ff07-kube-api-access-fwmwj\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.205565 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-service-ca\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.205610 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-serving-cert\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.205703 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-oauth-serving-cert\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.205790 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-trusted-ca-bundle\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.206014 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-console-config\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.206215 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-oauth-config\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.308195 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-trusted-ca-bundle\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.308251 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-console-config\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.308294 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-oauth-config\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.308326 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwmwj\" (UniqueName: \"kubernetes.io/projected/6676943b-3f20-4874-9c99-e5da4e28ff07-kube-api-access-fwmwj\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.308355 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-service-ca\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc 
kubenswrapper[4591]: I1203 12:11:30.308376 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-serving-cert\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.308413 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-oauth-serving-cert\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.309081 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-console-config\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.309203 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-oauth-serving-cert\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.309935 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-service-ca\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.309966 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-trusted-ca-bundle\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.314769 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-oauth-config\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.315616 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-serving-cert\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.325564 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwmwj\" (UniqueName: \"kubernetes.io/projected/6676943b-3f20-4874-9c99-e5da4e28ff07-kube-api-access-fwmwj\") pod \"console-798d6cf7d4-x7bph\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.457578 4591 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.639529 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/metrics-server-7b878448f-rfd46"] Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.640410 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.642026 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-tls" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.642098 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kubelet-serving-ca-bundle" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.642284 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-dockercfg-9758m" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.642446 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-client-certs" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.642580 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-server-audit-profiles" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.644746 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-42hjm5bsmkg8c" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.649491 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-7b878448f-rfd46"] Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.714195 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-client-ca-bundle\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.714247 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-secret-metrics-server-tls\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.714315 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grprz\" (UniqueName: \"kubernetes.io/projected/20c25fc5-aec6-4bd3-aa05-0bd633a272af-kube-api-access-grprz\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.714357 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/20c25fc5-aec6-4bd3-aa05-0bd633a272af-metrics-server-audit-profiles\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc 
kubenswrapper[4591]: I1203 12:11:30.714411 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20c25fc5-aec6-4bd3-aa05-0bd633a272af-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.714438 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-secret-metrics-client-certs\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.714458 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/20c25fc5-aec6-4bd3-aa05-0bd633a272af-audit-log\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.815818 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/20c25fc5-aec6-4bd3-aa05-0bd633a272af-metrics-server-audit-profiles\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.815996 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20c25fc5-aec6-4bd3-aa05-0bd633a272af-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.817481 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/20c25fc5-aec6-4bd3-aa05-0bd633a272af-metrics-server-audit-profiles\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.817837 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/20c25fc5-aec6-4bd3-aa05-0bd633a272af-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.816113 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-secret-metrics-client-certs\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.817920 4591 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/20c25fc5-aec6-4bd3-aa05-0bd633a272af-audit-log\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.818089 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-client-ca-bundle\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.818218 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-secret-metrics-server-tls\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.818302 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grprz\" (UniqueName: \"kubernetes.io/projected/20c25fc5-aec6-4bd3-aa05-0bd633a272af-kube-api-access-grprz\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.818550 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/20c25fc5-aec6-4bd3-aa05-0bd633a272af-audit-log\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.824465 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-secret-metrics-server-tls\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.827713 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-secret-metrics-client-certs\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.828542 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c25fc5-aec6-4bd3-aa05-0bd633a272af-client-ca-bundle\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.833975 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grprz\" (UniqueName: \"kubernetes.io/projected/20c25fc5-aec6-4bd3-aa05-0bd633a272af-kube-api-access-grprz\") pod \"metrics-server-7b878448f-rfd46\" (UID: \"20c25fc5-aec6-4bd3-aa05-0bd633a272af\") " 
pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:30 crc kubenswrapper[4591]: I1203 12:11:30.969453 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.106709 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj"] Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.108953 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.112570 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"default-dockercfg-6tstp" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.112712 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"monitoring-plugin-cert" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.119712 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj"] Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.126576 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/c3364550-d0cb-40a8-9dd7-e7132407d18e-monitoring-plugin-cert\") pod \"monitoring-plugin-7597b5f88c-jsssj\" (UID: \"c3364550-d0cb-40a8-9dd7-e7132407d18e\") " pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.230851 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/c3364550-d0cb-40a8-9dd7-e7132407d18e-monitoring-plugin-cert\") pod \"monitoring-plugin-7597b5f88c-jsssj\" (UID: \"c3364550-d0cb-40a8-9dd7-e7132407d18e\") " pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.235075 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/c3364550-d0cb-40a8-9dd7-e7132407d18e-monitoring-plugin-cert\") pod \"monitoring-plugin-7597b5f88c-jsssj\" (UID: \"c3364550-d0cb-40a8-9dd7-e7132407d18e\") " pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.450867 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.474330 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-7b878448f-rfd46"] Dec 03 12:11:31 crc kubenswrapper[4591]: W1203 12:11:31.492297 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20c25fc5_aec6_4bd3_aa05_0bd633a272af.slice/crio-5be9f144379cbcd134ecfa4bf2f26124f324321e4f45a63b4e153ae50b969832 WatchSource:0}: Error finding container 5be9f144379cbcd134ecfa4bf2f26124f324321e4f45a63b4e153ae50b969832: Status 404 returned error can't find the container with id 5be9f144379cbcd134ecfa4bf2f26124f324321e4f45a63b4e153ae50b969832 Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.548724 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-798d6cf7d4-x7bph"] Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.581858 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.583681 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.589296 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.589464 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"serving-certs-ca-bundle" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.589533 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-rbac-proxy" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.589597 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.590826 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-web-config" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.590878 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-dockercfg-wqzdt" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.590926 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls-assets-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.591038 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-prometheus-http-client-file" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.591164 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-grpc-tls-2f924juudmdm6" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.591200 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-kube-rbac-proxy-web" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.592301 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-sidecar-tls" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.592814 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-k8s-rulefiles-0" Dec 03 12:11:31 crc 
kubenswrapper[4591]: I1203 12:11:31.596109 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-trusted-ca-bundle" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.614949 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642418 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642478 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d7a7f227-865b-445f-91a5-7d567fbcafb2-config-out\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642515 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642541 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-web-config\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642570 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njnxh\" (UniqueName: \"kubernetes.io/projected/d7a7f227-865b-445f-91a5-7d567fbcafb2-kube-api-access-njnxh\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642604 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642630 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642691 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " 
pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642726 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642755 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642792 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642815 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642843 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-config\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642870 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642892 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642915 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642943 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.642976 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d7a7f227-865b-445f-91a5-7d567fbcafb2-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745185 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745490 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745517 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745540 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-config\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745574 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745597 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745636 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745683 
4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745734 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d7a7f227-865b-445f-91a5-7d567fbcafb2-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745793 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745818 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d7a7f227-865b-445f-91a5-7d567fbcafb2-config-out\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745853 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745876 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-web-config\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745915 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njnxh\" (UniqueName: \"kubernetes.io/projected/d7a7f227-865b-445f-91a5-7d567fbcafb2-kube-api-access-njnxh\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745945 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.745968 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.746029 4591 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.746053 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.746784 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.747483 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.748056 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.748252 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.749190 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.751487 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-web-config\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.751508 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.751570 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.751617 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.752133 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d7a7f227-865b-445f-91a5-7d567fbcafb2-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.752218 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.752638 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.754818 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.755013 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-config\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.755291 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/d7a7f227-865b-445f-91a5-7d567fbcafb2-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.755312 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d7a7f227-865b-445f-91a5-7d567fbcafb2-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.755481 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d7a7f227-865b-445f-91a5-7d567fbcafb2-config-out\") 
pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.762646 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njnxh\" (UniqueName: \"kubernetes.io/projected/d7a7f227-865b-445f-91a5-7d567fbcafb2-kube-api-access-njnxh\") pod \"prometheus-k8s-0\" (UID: \"d7a7f227-865b-445f-91a5-7d567fbcafb2\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.906247 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj"] Dec 03 12:11:31 crc kubenswrapper[4591]: I1203 12:11:31.909579 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:31 crc kubenswrapper[4591]: W1203 12:11:31.910945 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3364550_d0cb_40a8_9dd7_e7132407d18e.slice/crio-c97d2deec866bdb7e2fa42474077107f8f9aee1b6dad18a5df05733987ebd711 WatchSource:0}: Error finding container c97d2deec866bdb7e2fa42474077107f8f9aee1b6dad18a5df05733987ebd711: Status 404 returned error can't find the container with id c97d2deec866bdb7e2fa42474077107f8f9aee1b6dad18a5df05733987ebd711 Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.177581 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-798d6cf7d4-x7bph" event={"ID":"6676943b-3f20-4874-9c99-e5da4e28ff07","Type":"ContainerStarted","Data":"92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.177935 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-798d6cf7d4-x7bph" event={"ID":"6676943b-3f20-4874-9c99-e5da4e28ff07","Type":"ContainerStarted","Data":"e6a009bf7a2142ef52cabc635512fb6212c5b8a9cda96d020eb29349fbb22aff"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.182699 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" event={"ID":"9b15c888-a5fa-4ae4-beaa-e15261e0faa6","Type":"ContainerStarted","Data":"3f03f45154c9288e300f5dc572d72d1febd050c2a15319633186f780628dc538"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.182729 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" event={"ID":"9b15c888-a5fa-4ae4-beaa-e15261e0faa6","Type":"ContainerStarted","Data":"8317dde9a8d39222110a641ce1d252de42371c13cf853a8bfac99ac456d10dde"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.182744 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" event={"ID":"9b15c888-a5fa-4ae4-beaa-e15261e0faa6","Type":"ContainerStarted","Data":"f2b56367c85ca408c496625d0f14143bc64cf2a94313aa94fc85d14cd1980b16"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.183885 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" event={"ID":"20c25fc5-aec6-4bd3-aa05-0bd633a272af","Type":"ContainerStarted","Data":"5be9f144379cbcd134ecfa4bf2f26124f324321e4f45a63b4e153ae50b969832"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.187641 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" 
event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerStarted","Data":"e9639170edaa562538715c6c70aade63eb6bed417fddd097a36b0bf3302ded78"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.187762 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerStarted","Data":"3dee4dd914051ba1951fe7d8c744f08562264d21f0e9a6eb69926e5815c5b18d"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.187827 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerStarted","Data":"de4301088d506cb37122966202ba7eabb021babcc62a297b0d8fba871f11d8cf"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.187880 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerStarted","Data":"bc9e399fc26bfd67e43560bd2f9de9efa6fe0b3403116eace8126f20d32251a0"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.187934 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerStarted","Data":"dfdb4ca20686de0d4f16c3810d11684ec529f70fc1bd62d54a455e7aacd0e273"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.188761 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" event={"ID":"c3364550-d0cb-40a8-9dd7-e7132407d18e","Type":"ContainerStarted","Data":"c97d2deec866bdb7e2fa42474077107f8f9aee1b6dad18a5df05733987ebd711"} Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.197190 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-798d6cf7d4-x7bph" podStartSLOduration=2.197168525 podStartE2EDuration="2.197168525s" podCreationTimestamp="2025-12-03 12:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:11:32.192731098 +0000 UTC m=+389.619770868" watchObservedRunningTime="2025-12-03 12:11:32.197168525 +0000 UTC m=+389.624208295" Dec 03 12:11:32 crc kubenswrapper[4591]: I1203 12:11:32.332902 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 03 12:11:33 crc kubenswrapper[4591]: I1203 12:11:33.198542 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"8d4d2d45-debc-475d-a959-e222b806f7d6","Type":"ContainerStarted","Data":"6ac5277824741822c8b8ad0badea6a0bfd5bbaba37856c703dc0134e4430a5c6"} Dec 03 12:11:33 crc kubenswrapper[4591]: I1203 12:11:33.200759 4591 generic.go:334] "Generic (PLEG): container finished" podID="d7a7f227-865b-445f-91a5-7d567fbcafb2" containerID="91ac65296d292187a9e964467d13975e1de330ac0d2045ea36958b29c747ed33" exitCode=0 Dec 03 12:11:33 crc kubenswrapper[4591]: I1203 12:11:33.200828 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerDied","Data":"91ac65296d292187a9e964467d13975e1de330ac0d2045ea36958b29c747ed33"} Dec 03 12:11:33 crc kubenswrapper[4591]: I1203 12:11:33.200861 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" 
event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerStarted","Data":"5ac16f09e504e16579f74775f24a450ef5a8feaa1d388189374accc27bd5bc0f"} Dec 03 12:11:33 crc kubenswrapper[4591]: I1203 12:11:33.207884 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" event={"ID":"9b15c888-a5fa-4ae4-beaa-e15261e0faa6","Type":"ContainerStarted","Data":"b72050d6795126b9e31dc601a77790ead4fc3bc058ecc9119b4fcc28c2933bd8"} Dec 03 12:11:33 crc kubenswrapper[4591]: I1203 12:11:33.207915 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" event={"ID":"9b15c888-a5fa-4ae4-beaa-e15261e0faa6","Type":"ContainerStarted","Data":"3968dc03b84e750754608d13051804f256708bd0e403f2654f4ba8b3ab9f5ece"} Dec 03 12:11:33 crc kubenswrapper[4591]: I1203 12:11:33.221123 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/alertmanager-main-0" podStartSLOduration=1.720801346 podStartE2EDuration="7.221102152s" podCreationTimestamp="2025-12-03 12:11:26 +0000 UTC" firstStartedPulling="2025-12-03 12:11:27.190010483 +0000 UTC m=+384.617050253" lastFinishedPulling="2025-12-03 12:11:32.69031129 +0000 UTC m=+390.117351059" observedRunningTime="2025-12-03 12:11:33.218874653 +0000 UTC m=+390.645914422" watchObservedRunningTime="2025-12-03 12:11:33.221102152 +0000 UTC m=+390.648141922" Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.220340 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" event={"ID":"9b15c888-a5fa-4ae4-beaa-e15261e0faa6","Type":"ContainerStarted","Data":"7258a9144a1b131c7ff817011ad9aa0a8a57a07dfcefc4b2e1a52b24d6f8b735"} Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.221026 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.222318 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" event={"ID":"20c25fc5-aec6-4bd3-aa05-0bd633a272af","Type":"ContainerStarted","Data":"0781fbfb26250f1f508ccf654a09e449813705cb82e9a039c0fc9981b398f4ee"} Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.223899 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" event={"ID":"c3364550-d0cb-40a8-9dd7-e7132407d18e","Type":"ContainerStarted","Data":"0e11d7da48f6f55ea53eae7b2f052431f9a51a6a7578bdb35f94306ba8105729"} Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.224419 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.233438 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.251802 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" podStartSLOduration=2.926085752 podStartE2EDuration="7.251786038s" podCreationTimestamp="2025-12-03 12:11:27 +0000 UTC" firstStartedPulling="2025-12-03 12:11:28.370322002 +0000 UTC m=+385.797361772" lastFinishedPulling="2025-12-03 12:11:32.696022288 +0000 UTC m=+390.123062058" observedRunningTime="2025-12-03 12:11:34.2434341 +0000 UTC 
m=+391.670473880" watchObservedRunningTime="2025-12-03 12:11:34.251786038 +0000 UTC m=+391.678825807" Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.264902 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/monitoring-plugin-7597b5f88c-jsssj" podStartSLOduration=1.313629651 podStartE2EDuration="3.264884576s" podCreationTimestamp="2025-12-03 12:11:31 +0000 UTC" firstStartedPulling="2025-12-03 12:11:31.915896247 +0000 UTC m=+389.342936017" lastFinishedPulling="2025-12-03 12:11:33.867151172 +0000 UTC m=+391.294190942" observedRunningTime="2025-12-03 12:11:34.259413169 +0000 UTC m=+391.686452939" watchObservedRunningTime="2025-12-03 12:11:34.264884576 +0000 UTC m=+391.691924346" Dec 03 12:11:34 crc kubenswrapper[4591]: I1203 12:11:34.283717 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" podStartSLOduration=1.9148516070000001 podStartE2EDuration="4.283702238s" podCreationTimestamp="2025-12-03 12:11:30 +0000 UTC" firstStartedPulling="2025-12-03 12:11:31.495483539 +0000 UTC m=+388.922523310" lastFinishedPulling="2025-12-03 12:11:33.864334171 +0000 UTC m=+391.291373941" observedRunningTime="2025-12-03 12:11:34.279677078 +0000 UTC m=+391.706716848" watchObservedRunningTime="2025-12-03 12:11:34.283702238 +0000 UTC m=+391.710742008" Dec 03 12:11:35 crc kubenswrapper[4591]: I1203 12:11:35.749071 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk"] Dec 03 12:11:35 crc kubenswrapper[4591]: I1203 12:11:35.749787 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" podUID="72a67835-5ae4-4017-83bd-6842d365eb61" containerName="route-controller-manager" containerID="cri-o://037e997e5b374ebf4ed9aed5d9df23806458678e0194e366b458cfd66a65c2b1" gracePeriod=30 Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.244912 4591 generic.go:334] "Generic (PLEG): container finished" podID="72a67835-5ae4-4017-83bd-6842d365eb61" containerID="037e997e5b374ebf4ed9aed5d9df23806458678e0194e366b458cfd66a65c2b1" exitCode=0 Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.245001 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" event={"ID":"72a67835-5ae4-4017-83bd-6842d365eb61","Type":"ContainerDied","Data":"037e997e5b374ebf4ed9aed5d9df23806458678e0194e366b458cfd66a65c2b1"} Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.350463 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.445704 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-269lr\" (UniqueName: \"kubernetes.io/projected/72a67835-5ae4-4017-83bd-6842d365eb61-kube-api-access-269lr\") pod \"72a67835-5ae4-4017-83bd-6842d365eb61\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.446274 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72a67835-5ae4-4017-83bd-6842d365eb61-serving-cert\") pod \"72a67835-5ae4-4017-83bd-6842d365eb61\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.446979 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-client-ca\") pod \"72a67835-5ae4-4017-83bd-6842d365eb61\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.447043 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-config\") pod \"72a67835-5ae4-4017-83bd-6842d365eb61\" (UID: \"72a67835-5ae4-4017-83bd-6842d365eb61\") " Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.448348 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-config" (OuterVolumeSpecName: "config") pod "72a67835-5ae4-4017-83bd-6842d365eb61" (UID: "72a67835-5ae4-4017-83bd-6842d365eb61"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.448455 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-client-ca" (OuterVolumeSpecName: "client-ca") pod "72a67835-5ae4-4017-83bd-6842d365eb61" (UID: "72a67835-5ae4-4017-83bd-6842d365eb61"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.450152 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72a67835-5ae4-4017-83bd-6842d365eb61-kube-api-access-269lr" (OuterVolumeSpecName: "kube-api-access-269lr") pod "72a67835-5ae4-4017-83bd-6842d365eb61" (UID: "72a67835-5ae4-4017-83bd-6842d365eb61"). InnerVolumeSpecName "kube-api-access-269lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.450765 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72a67835-5ae4-4017-83bd-6842d365eb61-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "72a67835-5ae4-4017-83bd-6842d365eb61" (UID: "72a67835-5ae4-4017-83bd-6842d365eb61"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.549902 4591 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72a67835-5ae4-4017-83bd-6842d365eb61-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.549933 4591 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.549944 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72a67835-5ae4-4017-83bd-6842d365eb61-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:36 crc kubenswrapper[4591]: I1203 12:11:36.549955 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-269lr\" (UniqueName: \"kubernetes.io/projected/72a67835-5ae4-4017-83bd-6842d365eb61-kube-api-access-269lr\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.089438 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48"] Dec 03 12:11:37 crc kubenswrapper[4591]: E1203 12:11:37.089691 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a67835-5ae4-4017-83bd-6842d365eb61" containerName="route-controller-manager" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.089704 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a67835-5ae4-4017-83bd-6842d365eb61" containerName="route-controller-manager" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.089831 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="72a67835-5ae4-4017-83bd-6842d365eb61" containerName="route-controller-manager" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.090275 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.099040 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48"] Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.158120 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5e75ef5-a1c1-475d-b232-60033479c97d-config\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.158153 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5e75ef5-a1c1-475d-b232-60033479c97d-serving-cert\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.158223 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5e75ef5-a1c1-475d-b232-60033479c97d-client-ca\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.158351 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh5bf\" (UniqueName: \"kubernetes.io/projected/e5e75ef5-a1c1-475d-b232-60033479c97d-kube-api-access-vh5bf\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.254356 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" event={"ID":"72a67835-5ae4-4017-83bd-6842d365eb61","Type":"ContainerDied","Data":"99e22ed3309cde97e10023e320b2319662599cf01c510627726a05146acc5116"} Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.254378 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.254452 4591 scope.go:117] "RemoveContainer" containerID="037e997e5b374ebf4ed9aed5d9df23806458678e0194e366b458cfd66a65c2b1" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.259640 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh5bf\" (UniqueName: \"kubernetes.io/projected/e5e75ef5-a1c1-475d-b232-60033479c97d-kube-api-access-vh5bf\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.259756 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5e75ef5-a1c1-475d-b232-60033479c97d-serving-cert\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.259793 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5e75ef5-a1c1-475d-b232-60033479c97d-config\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.259889 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5e75ef5-a1c1-475d-b232-60033479c97d-client-ca\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.260907 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5e75ef5-a1c1-475d-b232-60033479c97d-client-ca\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.261777 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5e75ef5-a1c1-475d-b232-60033479c97d-config\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.262538 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerStarted","Data":"dc9f3e36a84572278d570853508479e6e2deafe20842cdb13edb72000e56fed0"} Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.262599 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerStarted","Data":"5e384a6bab2120327d596fd0fd94cfd0fa7156909513ef3892bb2d631a1ec9ed"} Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 
12:11:37.262619 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerStarted","Data":"f2b3c29b5059383c0f6a2d924ffb17c8ba82baa54881989860395ee08687fefe"} Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.262631 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerStarted","Data":"35d3e4ad042e4c629ba9c1be170a7a810aa236b323dc4d5228fb245b46699227"} Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.262643 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerStarted","Data":"cf6d6dce51d064a9e5b0029625225a3dc4eeee4cf800fa2cf39628fcb087791d"} Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.262655 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"d7a7f227-865b-445f-91a5-7d567fbcafb2","Type":"ContainerStarted","Data":"bd607f23f6bb84605f1a1307747cd1098adc08edf8e84147fde6924f9c9d914f"} Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.264962 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5e75ef5-a1c1-475d-b232-60033479c97d-serving-cert\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.274607 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk"] Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.279158 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d97b4688c-gmcdk"] Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.281860 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh5bf\" (UniqueName: \"kubernetes.io/projected/e5e75ef5-a1c1-475d-b232-60033479c97d-kube-api-access-vh5bf\") pod \"route-controller-manager-6f674d84b5-zmb48\" (UID: \"e5e75ef5-a1c1-475d-b232-60033479c97d\") " pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.301668 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-k8s-0" podStartSLOduration=3.403088544 podStartE2EDuration="6.301650902s" podCreationTimestamp="2025-12-03 12:11:31 +0000 UTC" firstStartedPulling="2025-12-03 12:11:33.201874377 +0000 UTC m=+390.628914148" lastFinishedPulling="2025-12-03 12:11:36.100436736 +0000 UTC m=+393.527476506" observedRunningTime="2025-12-03 12:11:37.295674012 +0000 UTC m=+394.722713782" watchObservedRunningTime="2025-12-03 12:11:37.301650902 +0000 UTC m=+394.728690671" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.406770 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.609172 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/thanos-querier-6f9dd6cd7d-m7kb4" Dec 03 12:11:37 crc kubenswrapper[4591]: I1203 12:11:37.793652 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48"] Dec 03 12:11:37 crc kubenswrapper[4591]: W1203 12:11:37.796868 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5e75ef5_a1c1_475d_b232_60033479c97d.slice/crio-a8a8e942fe5969b0ee55ef9981071448271320f4d9e5db01d7170b5e7e86531f WatchSource:0}: Error finding container a8a8e942fe5969b0ee55ef9981071448271320f4d9e5db01d7170b5e7e86531f: Status 404 returned error can't find the container with id a8a8e942fe5969b0ee55ef9981071448271320f4d9e5db01d7170b5e7e86531f Dec 03 12:11:38 crc kubenswrapper[4591]: I1203 12:11:38.272727 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" event={"ID":"e5e75ef5-a1c1-475d-b232-60033479c97d","Type":"ContainerStarted","Data":"f91ee1666857a9deac4c8286e4e4e4b26916d745ee68920dd7b1243938bf4505"} Dec 03 12:11:38 crc kubenswrapper[4591]: I1203 12:11:38.273162 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" event={"ID":"e5e75ef5-a1c1-475d-b232-60033479c97d","Type":"ContainerStarted","Data":"a8a8e942fe5969b0ee55ef9981071448271320f4d9e5db01d7170b5e7e86531f"} Dec 03 12:11:38 crc kubenswrapper[4591]: I1203 12:11:38.273219 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:38 crc kubenswrapper[4591]: I1203 12:11:38.293214 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" podStartSLOduration=3.293188786 podStartE2EDuration="3.293188786s" podCreationTimestamp="2025-12-03 12:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:11:38.290829988 +0000 UTC m=+395.717869758" watchObservedRunningTime="2025-12-03 12:11:38.293188786 +0000 UTC m=+395.720228546" Dec 03 12:11:38 crc kubenswrapper[4591]: I1203 12:11:38.659106 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6f674d84b5-zmb48" Dec 03 12:11:38 crc kubenswrapper[4591]: I1203 12:11:38.898564 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72a67835-5ae4-4017-83bd-6842d365eb61" path="/var/lib/kubelet/pods/72a67835-5ae4-4017-83bd-6842d365eb61/volumes" Dec 03 12:11:40 crc kubenswrapper[4591]: I1203 12:11:40.458267 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:40 crc kubenswrapper[4591]: I1203 12:11:40.458737 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:40 crc kubenswrapper[4591]: I1203 12:11:40.464203 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:41 crc kubenswrapper[4591]: I1203 12:11:41.297910 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:11:41 crc kubenswrapper[4591]: I1203 12:11:41.346533 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cnc8r"] Dec 03 12:11:41 crc kubenswrapper[4591]: I1203 12:11:41.910761 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:11:43 crc kubenswrapper[4591]: I1203 12:11:43.910192 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" podUID="2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" containerName="registry" containerID="cri-o://eefda4ed8f40179d744f8bab21a8dbca4029918d9108977d8dd123e4623f11ff" gracePeriod=30 Dec 03 12:11:44 crc kubenswrapper[4591]: I1203 12:11:44.322536 4591 generic.go:334] "Generic (PLEG): container finished" podID="2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" containerID="eefda4ed8f40179d744f8bab21a8dbca4029918d9108977d8dd123e4623f11ff" exitCode=0 Dec 03 12:11:44 crc kubenswrapper[4591]: I1203 12:11:44.322610 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" event={"ID":"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa","Type":"ContainerDied","Data":"eefda4ed8f40179d744f8bab21a8dbca4029918d9108977d8dd123e4623f11ff"} Dec 03 12:11:44 crc kubenswrapper[4591]: I1203 12:11:44.580700 4591 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-7qzlf container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.31:5000/healthz\": dial tcp 10.217.0.31:5000: connect: connection refused" start-of-body= Dec 03 12:11:44 crc kubenswrapper[4591]: I1203 12:11:44.580771 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" podUID="2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.31:5000/healthz\": dial tcp 10.217.0.31:5000: connect: connection refused" Dec 03 12:11:45 crc kubenswrapper[4591]: I1203 12:11:45.872465 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.013686 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h45zb\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-kube-api-access-h45zb\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.013760 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-ca-trust-extracted\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.013919 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-trusted-ca\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.014016 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-certificates\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.014101 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-tls\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.014151 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-installation-pull-secrets\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.014179 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-bound-sa-token\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.014349 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\" (UID: \"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa\") " Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.015107 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.015170 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.022791 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-kube-api-access-h45zb" (OuterVolumeSpecName: "kube-api-access-h45zb") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "kube-api-access-h45zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.023276 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.023698 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.023954 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.028817 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.032544 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" (UID: "2a403cb5-eebc-4d01-990a-a3bbc24c2bfa"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.116323 4591 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.116352 4591 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.116365 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h45zb\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-kube-api-access-h45zb\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.116375 4591 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.116384 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.116393 4591 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.116401 4591 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.339049 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" event={"ID":"2a403cb5-eebc-4d01-990a-a3bbc24c2bfa","Type":"ContainerDied","Data":"b2896ba38b9ac19ac5dedb86b43365df4d7558d599e10b46dfb649bce58d9fa1"} Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.339412 4591 scope.go:117] "RemoveContainer" containerID="eefda4ed8f40179d744f8bab21a8dbca4029918d9108977d8dd123e4623f11ff" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.339157 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7qzlf" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.374336 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7qzlf"] Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.378392 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7qzlf"] Dec 03 12:11:46 crc kubenswrapper[4591]: E1203 12:11:46.387058 4591 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a403cb5_eebc_4d01_990a_a3bbc24c2bfa.slice\": RecentStats: unable to find data in memory cache]" Dec 03 12:11:46 crc kubenswrapper[4591]: I1203 12:11:46.899226 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" path="/var/lib/kubelet/pods/2a403cb5-eebc-4d01-990a-a3bbc24c2bfa/volumes" Dec 03 12:11:50 crc kubenswrapper[4591]: I1203 12:11:50.970518 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:11:50 crc kubenswrapper[4591]: I1203 12:11:50.970904 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.381537 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-cnc8r" podUID="3d1889fc-cf0c-4114-8653-a7b95c23bdd5" containerName="console" containerID="cri-o://7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7" gracePeriod=15 Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.716894 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cnc8r_3d1889fc-cf0c-4114-8653-a7b95c23bdd5/console/0.log" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.717332 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.817109 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-service-ca\") pod \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.817220 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-oauth-config\") pod \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.818026 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-service-ca" (OuterVolumeSpecName: "service-ca") pod "3d1889fc-cf0c-4114-8653-a7b95c23bdd5" (UID: "3d1889fc-cf0c-4114-8653-a7b95c23bdd5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.818216 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-oauth-serving-cert\") pod \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.818390 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtpvt\" (UniqueName: \"kubernetes.io/projected/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-kube-api-access-jtpvt\") pod \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.818420 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-trusted-ca-bundle\") pod \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.818453 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-config\") pod \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.818491 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-serving-cert\") pod \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\" (UID: \"3d1889fc-cf0c-4114-8653-a7b95c23bdd5\") " Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.819039 4591 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.819051 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "3d1889fc-cf0c-4114-8653-a7b95c23bdd5" (UID: "3d1889fc-cf0c-4114-8653-a7b95c23bdd5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.819092 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-config" (OuterVolumeSpecName: "console-config") pod "3d1889fc-cf0c-4114-8653-a7b95c23bdd5" (UID: "3d1889fc-cf0c-4114-8653-a7b95c23bdd5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.819400 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "3d1889fc-cf0c-4114-8653-a7b95c23bdd5" (UID: "3d1889fc-cf0c-4114-8653-a7b95c23bdd5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.822983 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "3d1889fc-cf0c-4114-8653-a7b95c23bdd5" (UID: "3d1889fc-cf0c-4114-8653-a7b95c23bdd5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.823266 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "3d1889fc-cf0c-4114-8653-a7b95c23bdd5" (UID: "3d1889fc-cf0c-4114-8653-a7b95c23bdd5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.824197 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-kube-api-access-jtpvt" (OuterVolumeSpecName: "kube-api-access-jtpvt") pod "3d1889fc-cf0c-4114-8653-a7b95c23bdd5" (UID: "3d1889fc-cf0c-4114-8653-a7b95c23bdd5"). InnerVolumeSpecName "kube-api-access-jtpvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.920859 4591 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.920894 4591 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.920908 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtpvt\" (UniqueName: \"kubernetes.io/projected/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-kube-api-access-jtpvt\") on node \"crc\" DevicePath \"\"" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.920921 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.920931 4591 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:12:06 crc kubenswrapper[4591]: I1203 12:12:06.920939 4591 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3d1889fc-cf0c-4114-8653-a7b95c23bdd5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.477221 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cnc8r_3d1889fc-cf0c-4114-8653-a7b95c23bdd5/console/0.log" Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.477355 4591 generic.go:334] "Generic (PLEG): container finished" podID="3d1889fc-cf0c-4114-8653-a7b95c23bdd5" containerID="7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7" 
exitCode=2 Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.477451 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cnc8r" Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.477449 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cnc8r" event={"ID":"3d1889fc-cf0c-4114-8653-a7b95c23bdd5","Type":"ContainerDied","Data":"7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7"} Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.477528 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cnc8r" event={"ID":"3d1889fc-cf0c-4114-8653-a7b95c23bdd5","Type":"ContainerDied","Data":"4912d6adc0c2b324cb495fa2ff955fe259f5e4d1600eedd226fe278241c503d4"} Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.477562 4591 scope.go:117] "RemoveContainer" containerID="7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7" Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.494822 4591 scope.go:117] "RemoveContainer" containerID="7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7" Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.494898 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cnc8r"] Dec 03 12:12:07 crc kubenswrapper[4591]: E1203 12:12:07.495706 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7\": container with ID starting with 7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7 not found: ID does not exist" containerID="7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7" Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.495768 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7"} err="failed to get container status \"7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7\": rpc error: code = NotFound desc = could not find container \"7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7\": container with ID starting with 7b792c3764152810e7e175ac2d9ad273775f22c04d4b9ca2d1e424a726ba8ef7 not found: ID does not exist" Dec 03 12:12:07 crc kubenswrapper[4591]: I1203 12:12:07.498230 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-cnc8r"] Dec 03 12:12:08 crc kubenswrapper[4591]: I1203 12:12:08.897315 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d1889fc-cf0c-4114-8653-a7b95c23bdd5" path="/var/lib/kubelet/pods/3d1889fc-cf0c-4114-8653-a7b95c23bdd5/volumes" Dec 03 12:12:10 crc kubenswrapper[4591]: I1203 12:12:10.975201 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:12:10 crc kubenswrapper[4591]: I1203 12:12:10.978880 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/metrics-server-7b878448f-rfd46" Dec 03 12:12:31 crc kubenswrapper[4591]: I1203 12:12:31.909944 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:12:31 crc kubenswrapper[4591]: I1203 12:12:31.939803 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:12:32 crc kubenswrapper[4591]: I1203 12:12:32.647313 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.514616 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7c96666cff-9fpl8"] Dec 03 12:12:42 crc kubenswrapper[4591]: E1203 12:12:42.515425 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" containerName="registry" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.515440 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" containerName="registry" Dec 03 12:12:42 crc kubenswrapper[4591]: E1203 12:12:42.515452 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1889fc-cf0c-4114-8653-a7b95c23bdd5" containerName="console" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.515458 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1889fc-cf0c-4114-8653-a7b95c23bdd5" containerName="console" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.515594 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a403cb5-eebc-4d01-990a-a3bbc24c2bfa" containerName="registry" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.515613 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d1889fc-cf0c-4114-8653-a7b95c23bdd5" containerName="console" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.516126 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.528667 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7c96666cff-9fpl8"] Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.568878 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-service-ca\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.568972 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-console-config\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.568998 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-oauth-config\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.569019 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-trusted-ca-bundle\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 
12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.569177 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8v2k\" (UniqueName: \"kubernetes.io/projected/ef80fee5-f895-4f71-a44b-13172da0afd2-kube-api-access-g8v2k\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.569207 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-serving-cert\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.569226 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-oauth-serving-cert\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.670566 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-service-ca\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.670637 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-console-config\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.670662 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-oauth-config\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.670679 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-trusted-ca-bundle\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.670762 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8v2k\" (UniqueName: \"kubernetes.io/projected/ef80fee5-f895-4f71-a44b-13172da0afd2-kube-api-access-g8v2k\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.670785 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-serving-cert\") pod \"console-7c96666cff-9fpl8\" (UID: 
\"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.670803 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-oauth-serving-cert\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.671736 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-service-ca\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.671742 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-console-config\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.671789 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-oauth-serving-cert\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.672022 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-trusted-ca-bundle\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.676523 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-serving-cert\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.677248 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-oauth-config\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.685566 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8v2k\" (UniqueName: \"kubernetes.io/projected/ef80fee5-f895-4f71-a44b-13172da0afd2-kube-api-access-g8v2k\") pod \"console-7c96666cff-9fpl8\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:42 crc kubenswrapper[4591]: I1203 12:12:42.832361 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:43 crc kubenswrapper[4591]: I1203 12:12:43.201826 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7c96666cff-9fpl8"] Dec 03 12:12:43 crc kubenswrapper[4591]: W1203 12:12:43.206822 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef80fee5_f895_4f71_a44b_13172da0afd2.slice/crio-39aabeea8c46a724a11dc2f66eab5563c0b4d12991411f94ef9c39c20b1df576 WatchSource:0}: Error finding container 39aabeea8c46a724a11dc2f66eab5563c0b4d12991411f94ef9c39c20b1df576: Status 404 returned error can't find the container with id 39aabeea8c46a724a11dc2f66eab5563c0b4d12991411f94ef9c39c20b1df576 Dec 03 12:12:43 crc kubenswrapper[4591]: I1203 12:12:43.687575 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c96666cff-9fpl8" event={"ID":"ef80fee5-f895-4f71-a44b-13172da0afd2","Type":"ContainerStarted","Data":"5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134"} Dec 03 12:12:43 crc kubenswrapper[4591]: I1203 12:12:43.687632 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c96666cff-9fpl8" event={"ID":"ef80fee5-f895-4f71-a44b-13172da0afd2","Type":"ContainerStarted","Data":"39aabeea8c46a724a11dc2f66eab5563c0b4d12991411f94ef9c39c20b1df576"} Dec 03 12:12:52 crc kubenswrapper[4591]: I1203 12:12:52.832763 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:52 crc kubenswrapper[4591]: I1203 12:12:52.833630 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:52 crc kubenswrapper[4591]: I1203 12:12:52.838560 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:52 crc kubenswrapper[4591]: I1203 12:12:52.856613 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7c96666cff-9fpl8" podStartSLOduration=10.856576869 podStartE2EDuration="10.856576869s" podCreationTimestamp="2025-12-03 12:12:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:12:43.708706011 +0000 UTC m=+461.135745781" watchObservedRunningTime="2025-12-03 12:12:52.856576869 +0000 UTC m=+470.283616639" Dec 03 12:12:53 crc kubenswrapper[4591]: I1203 12:12:53.752827 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:12:53 crc kubenswrapper[4591]: I1203 12:12:53.796077 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-798d6cf7d4-x7bph"] Dec 03 12:13:18 crc kubenswrapper[4591]: I1203 12:13:18.829250 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-798d6cf7d4-x7bph" podUID="6676943b-3f20-4874-9c99-e5da4e28ff07" containerName="console" containerID="cri-o://92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f" gracePeriod=15 Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.135695 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-798d6cf7d4-x7bph_6676943b-3f20-4874-9c99-e5da4e28ff07/console/0.log" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.136014 4591 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.254376 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-trusted-ca-bundle\") pod \"6676943b-3f20-4874-9c99-e5da4e28ff07\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.254428 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-oauth-serving-cert\") pod \"6676943b-3f20-4874-9c99-e5da4e28ff07\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.254446 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-oauth-config\") pod \"6676943b-3f20-4874-9c99-e5da4e28ff07\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.254514 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwmwj\" (UniqueName: \"kubernetes.io/projected/6676943b-3f20-4874-9c99-e5da4e28ff07-kube-api-access-fwmwj\") pod \"6676943b-3f20-4874-9c99-e5da4e28ff07\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.254534 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-service-ca\") pod \"6676943b-3f20-4874-9c99-e5da4e28ff07\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.254554 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-serving-cert\") pod \"6676943b-3f20-4874-9c99-e5da4e28ff07\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.254572 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-console-config\") pod \"6676943b-3f20-4874-9c99-e5da4e28ff07\" (UID: \"6676943b-3f20-4874-9c99-e5da4e28ff07\") " Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.255463 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-service-ca" (OuterVolumeSpecName: "service-ca") pod "6676943b-3f20-4874-9c99-e5da4e28ff07" (UID: "6676943b-3f20-4874-9c99-e5da4e28ff07"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.255484 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-console-config" (OuterVolumeSpecName: "console-config") pod "6676943b-3f20-4874-9c99-e5da4e28ff07" (UID: "6676943b-3f20-4874-9c99-e5da4e28ff07"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.255497 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "6676943b-3f20-4874-9c99-e5da4e28ff07" (UID: "6676943b-3f20-4874-9c99-e5da4e28ff07"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.255508 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6676943b-3f20-4874-9c99-e5da4e28ff07" (UID: "6676943b-3f20-4874-9c99-e5da4e28ff07"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.260510 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "6676943b-3f20-4874-9c99-e5da4e28ff07" (UID: "6676943b-3f20-4874-9c99-e5da4e28ff07"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.260522 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "6676943b-3f20-4874-9c99-e5da4e28ff07" (UID: "6676943b-3f20-4874-9c99-e5da4e28ff07"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.260532 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6676943b-3f20-4874-9c99-e5da4e28ff07-kube-api-access-fwmwj" (OuterVolumeSpecName: "kube-api-access-fwmwj") pod "6676943b-3f20-4874-9c99-e5da4e28ff07" (UID: "6676943b-3f20-4874-9c99-e5da4e28ff07"). InnerVolumeSpecName "kube-api-access-fwmwj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.356662 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.356695 4591 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.356705 4591 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.356726 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwmwj\" (UniqueName: \"kubernetes.io/projected/6676943b-3f20-4874-9c99-e5da4e28ff07-kube-api-access-fwmwj\") on node \"crc\" DevicePath \"\"" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.356739 4591 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.356749 4591 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6676943b-3f20-4874-9c99-e5da4e28ff07-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.356757 4591 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6676943b-3f20-4874-9c99-e5da4e28ff07-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.925678 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-798d6cf7d4-x7bph_6676943b-3f20-4874-9c99-e5da4e28ff07/console/0.log" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.925768 4591 generic.go:334] "Generic (PLEG): container finished" podID="6676943b-3f20-4874-9c99-e5da4e28ff07" containerID="92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f" exitCode=2 Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.925850 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-798d6cf7d4-x7bph" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.925840 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-798d6cf7d4-x7bph" event={"ID":"6676943b-3f20-4874-9c99-e5da4e28ff07","Type":"ContainerDied","Data":"92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f"} Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.925918 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-798d6cf7d4-x7bph" event={"ID":"6676943b-3f20-4874-9c99-e5da4e28ff07","Type":"ContainerDied","Data":"e6a009bf7a2142ef52cabc635512fb6212c5b8a9cda96d020eb29349fbb22aff"} Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.925941 4591 scope.go:117] "RemoveContainer" containerID="92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.946663 4591 scope.go:117] "RemoveContainer" containerID="92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f" Dec 03 12:13:19 crc kubenswrapper[4591]: E1203 12:13:19.947444 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f\": container with ID starting with 92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f not found: ID does not exist" containerID="92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.947490 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f"} err="failed to get container status \"92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f\": rpc error: code = NotFound desc = could not find container \"92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f\": container with ID starting with 92282527918f18f9955625072f08d20569a42868981b2b336c17b7a0960f991f not found: ID does not exist" Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.955909 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-798d6cf7d4-x7bph"] Dec 03 12:13:19 crc kubenswrapper[4591]: I1203 12:13:19.959567 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-798d6cf7d4-x7bph"] Dec 03 12:13:20 crc kubenswrapper[4591]: I1203 12:13:20.908378 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6676943b-3f20-4874-9c99-e5da4e28ff07" path="/var/lib/kubelet/pods/6676943b-3f20-4874-9c99-e5da4e28ff07/volumes" Dec 03 12:13:25 crc kubenswrapper[4591]: I1203 12:13:25.299765 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:13:25 crc kubenswrapper[4591]: I1203 12:13:25.300184 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:13:55 crc kubenswrapper[4591]: I1203 12:13:55.299975 4591 patch_prober.go:28] interesting 
pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:13:55 crc kubenswrapper[4591]: I1203 12:13:55.301191 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.670254 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt"] Dec 03 12:13:57 crc kubenswrapper[4591]: E1203 12:13:57.670731 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6676943b-3f20-4874-9c99-e5da4e28ff07" containerName="console" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.670746 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="6676943b-3f20-4874-9c99-e5da4e28ff07" containerName="console" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.670890 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="6676943b-3f20-4874-9c99-e5da4e28ff07" containerName="console" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.671713 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.673664 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.681349 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt"] Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.755269 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.755407 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtxgp\" (UniqueName: \"kubernetes.io/projected/27e3cdd3-0d25-4358-9c11-52196145226d-kube-api-access-rtxgp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.755453 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 
12:13:57.856266 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.856343 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtxgp\" (UniqueName: \"kubernetes.io/projected/27e3cdd3-0d25-4358-9c11-52196145226d-kube-api-access-rtxgp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.856374 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.856933 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.856948 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.875228 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtxgp\" (UniqueName: \"kubernetes.io/projected/27e3cdd3-0d25-4358-9c11-52196145226d-kube-api-access-rtxgp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:57 crc kubenswrapper[4591]: I1203 12:13:57.985041 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:13:58 crc kubenswrapper[4591]: I1203 12:13:58.350592 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt"] Dec 03 12:13:59 crc kubenswrapper[4591]: I1203 12:13:59.163861 4591 generic.go:334] "Generic (PLEG): container finished" podID="27e3cdd3-0d25-4358-9c11-52196145226d" containerID="7a5617951c60317928a42342cbb5f515d04d2937b8da9b7c48bf8ed2c9fc418e" exitCode=0 Dec 03 12:13:59 crc kubenswrapper[4591]: I1203 12:13:59.163968 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" event={"ID":"27e3cdd3-0d25-4358-9c11-52196145226d","Type":"ContainerDied","Data":"7a5617951c60317928a42342cbb5f515d04d2937b8da9b7c48bf8ed2c9fc418e"} Dec 03 12:13:59 crc kubenswrapper[4591]: I1203 12:13:59.164169 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" event={"ID":"27e3cdd3-0d25-4358-9c11-52196145226d","Type":"ContainerStarted","Data":"b51d323431f4b3b5083552ebfdcb6c975f0627487ab2de4f20325404aba298c8"} Dec 03 12:13:59 crc kubenswrapper[4591]: I1203 12:13:59.165379 4591 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:14:01 crc kubenswrapper[4591]: I1203 12:14:01.182331 4591 generic.go:334] "Generic (PLEG): container finished" podID="27e3cdd3-0d25-4358-9c11-52196145226d" containerID="6fe12324a32661a165c9cd6a9db7af49b9ec5916dd2c03c684e420b530dda19a" exitCode=0 Dec 03 12:14:01 crc kubenswrapper[4591]: I1203 12:14:01.182421 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" event={"ID":"27e3cdd3-0d25-4358-9c11-52196145226d","Type":"ContainerDied","Data":"6fe12324a32661a165c9cd6a9db7af49b9ec5916dd2c03c684e420b530dda19a"} Dec 03 12:14:02 crc kubenswrapper[4591]: I1203 12:14:02.192674 4591 generic.go:334] "Generic (PLEG): container finished" podID="27e3cdd3-0d25-4358-9c11-52196145226d" containerID="61ad97701cd5028d567a676a1a3ee27a9648b74bfd9c474a0dc312dab95a263c" exitCode=0 Dec 03 12:14:02 crc kubenswrapper[4591]: I1203 12:14:02.192735 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" event={"ID":"27e3cdd3-0d25-4358-9c11-52196145226d","Type":"ContainerDied","Data":"61ad97701cd5028d567a676a1a3ee27a9648b74bfd9c474a0dc312dab95a263c"} Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.396776 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.551116 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-bundle\") pod \"27e3cdd3-0d25-4358-9c11-52196145226d\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.551311 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtxgp\" (UniqueName: \"kubernetes.io/projected/27e3cdd3-0d25-4358-9c11-52196145226d-kube-api-access-rtxgp\") pod \"27e3cdd3-0d25-4358-9c11-52196145226d\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.551396 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-util\") pod \"27e3cdd3-0d25-4358-9c11-52196145226d\" (UID: \"27e3cdd3-0d25-4358-9c11-52196145226d\") " Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.553232 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-bundle" (OuterVolumeSpecName: "bundle") pod "27e3cdd3-0d25-4358-9c11-52196145226d" (UID: "27e3cdd3-0d25-4358-9c11-52196145226d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.557596 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e3cdd3-0d25-4358-9c11-52196145226d-kube-api-access-rtxgp" (OuterVolumeSpecName: "kube-api-access-rtxgp") pod "27e3cdd3-0d25-4358-9c11-52196145226d" (UID: "27e3cdd3-0d25-4358-9c11-52196145226d"). InnerVolumeSpecName "kube-api-access-rtxgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.563537 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-util" (OuterVolumeSpecName: "util") pod "27e3cdd3-0d25-4358-9c11-52196145226d" (UID: "27e3cdd3-0d25-4358-9c11-52196145226d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.653623 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtxgp\" (UniqueName: \"kubernetes.io/projected/27e3cdd3-0d25-4358-9c11-52196145226d-kube-api-access-rtxgp\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.653681 4591 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:03 crc kubenswrapper[4591]: I1203 12:14:03.653700 4591 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/27e3cdd3-0d25-4358-9c11-52196145226d-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:04 crc kubenswrapper[4591]: I1203 12:14:04.209002 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" event={"ID":"27e3cdd3-0d25-4358-9c11-52196145226d","Type":"ContainerDied","Data":"b51d323431f4b3b5083552ebfdcb6c975f0627487ab2de4f20325404aba298c8"} Dec 03 12:14:04 crc kubenswrapper[4591]: I1203 12:14:04.209375 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b51d323431f4b3b5083552ebfdcb6c975f0627487ab2de4f20325404aba298c8" Dec 03 12:14:04 crc kubenswrapper[4591]: I1203 12:14:04.209240 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt" Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.574910 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-k4dxv"] Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.575570 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-controller" containerID="cri-o://c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d" gracePeriod=30 Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.575651 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="nbdb" containerID="cri-o://4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e" gracePeriod=30 Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.575710 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-acl-logging" containerID="cri-o://19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e" gracePeriod=30 Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.575673 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c" gracePeriod=30 Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.575723 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" 
containerName="kube-rbac-proxy-node" containerID="cri-o://b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd" gracePeriod=30 Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.575805 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="sbdb" containerID="cri-o://93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8" gracePeriod=30 Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.575835 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="northd" containerID="cri-o://8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595" gracePeriod=30 Dec 03 12:14:09 crc kubenswrapper[4591]: I1203 12:14:09.646208 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" containerID="cri-o://d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e" gracePeriod=30 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.248199 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovnkube-controller/3.log" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.250653 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovn-acl-logging/0.log" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251199 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovn-controller/0.log" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251601 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e" exitCode=0 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251632 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8" exitCode=0 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251643 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e" exitCode=0 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251653 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595" exitCode=0 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251661 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e" exitCode=143 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251669 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d" exitCode=143 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251667 4591 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e"} Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251710 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8"} Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251723 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e"} Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251733 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595"} Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251740 4591 scope.go:117] "RemoveContainer" containerID="ac72d950e077c1fd23e8816b0657b7e9de65dd710564130d43485581bdaeb9e5" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251742 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e"} Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.251842 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d"} Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.254043 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/2.log" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.254734 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/1.log" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.254776 4591 generic.go:334] "Generic (PLEG): container finished" podID="19d5b224-0f8a-49a3-84f4-f2c0ef74fda4" containerID="c8b78dd322bb74ab016f9c66c2bbad1989573a75101d93eda813a266b14a5b2e" exitCode=2 Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.254802 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerDied","Data":"c8b78dd322bb74ab016f9c66c2bbad1989573a75101d93eda813a266b14a5b2e"} Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.255413 4591 scope.go:117] "RemoveContainer" containerID="c8b78dd322bb74ab016f9c66c2bbad1989573a75101d93eda813a266b14a5b2e" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.255681 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-2qprr_openshift-multus(19d5b224-0f8a-49a3-84f4-f2c0ef74fda4)\"" pod="openshift-multus/multus-2qprr" 
podUID="19d5b224-0f8a-49a3-84f4-f2c0ef74fda4" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.276426 4591 scope.go:117] "RemoveContainer" containerID="d78c7a467c6c2b45fb940ac489dc4783f6f7de817ee7e0cf75611795ea2db7ba" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.767166 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovn-acl-logging/0.log" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.767875 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovn-controller/0.log" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.768488 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844446 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-54dt6"] Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844675 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="nbdb" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844692 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="nbdb" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844701 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844710 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844718 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="sbdb" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844724 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="sbdb" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844733 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844738 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844746 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844751 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844761 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="northd" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844766 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="northd" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844774 4591 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="27e3cdd3-0d25-4358-9c11-52196145226d" containerName="util" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844780 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e3cdd3-0d25-4358-9c11-52196145226d" containerName="util" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844788 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844794 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844802 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-acl-logging" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844807 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-acl-logging" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844814 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e3cdd3-0d25-4358-9c11-52196145226d" containerName="extract" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844820 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e3cdd3-0d25-4358-9c11-52196145226d" containerName="extract" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844830 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844836 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844843 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e3cdd3-0d25-4358-9c11-52196145226d" containerName="pull" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844848 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e3cdd3-0d25-4358-9c11-52196145226d" containerName="pull" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844856 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kube-rbac-proxy-node" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844861 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kube-rbac-proxy-node" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.844867 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kubecfg-setup" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844873 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kubecfg-setup" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.844990 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845000 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="northd" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845009 4591 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="sbdb" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845015 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845021 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="nbdb" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845028 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-acl-logging" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845038 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kube-rbac-proxy-node" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845045 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845051 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e3cdd3-0d25-4358-9c11-52196145226d" containerName="extract" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845072 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovn-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845081 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.845168 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845175 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: E1203 12:14:10.845185 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845190 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845277 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.845459 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerName="ovnkube-controller" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.846865 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868417 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-ovn\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868462 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovn-node-metrics-cert\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868507 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-ovn-kubernetes\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868531 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw8cm\" (UniqueName: \"kubernetes.io/projected/362420fc-42a2-444d-b450-49ff1c0eb5c2-kube-api-access-kw8cm\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868589 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-systemd-units\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868612 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-systemd\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868631 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-openvswitch\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868660 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-bin\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868681 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-etc-openvswitch\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868709 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-script-lib\") pod 
\"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868732 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-log-socket\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868749 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-kubelet\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868773 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-var-lib-openvswitch\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868802 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-env-overrides\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868820 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-slash\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868844 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-netd\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868870 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-node-log\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868907 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868949 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-netns\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.868966 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-config\") pod \"362420fc-42a2-444d-b450-49ff1c0eb5c2\" (UID: \"362420fc-42a2-444d-b450-49ff1c0eb5c2\") " Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869181 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-cni-netd\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869219 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjkbx\" (UniqueName: \"kubernetes.io/projected/cf97ec3d-0481-4795-a990-158c5d534234-kube-api-access-hjkbx\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869249 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-systemd-units\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869266 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-cni-bin\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869293 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-kubelet\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869334 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-var-lib-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869355 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869372 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-run-ovn-kubernetes\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869399 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-slash\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869416 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cf97ec3d-0481-4795-a990-158c5d534234-ovn-node-metrics-cert\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869437 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-etc-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869454 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-node-log\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869498 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-systemd\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869524 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-ovnkube-config\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869545 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-log-socket\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869565 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-ovn\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869587 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 
03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869606 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-run-netns\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869619 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-ovnkube-script-lib\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869633 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-env-overrides\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869723 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-slash" (OuterVolumeSpecName: "host-slash") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869749 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869770 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-node-log" (OuterVolumeSpecName: "node-log") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869790 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869784 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869819 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.869825 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870178 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870174 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870234 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870290 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870349 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870386 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870432 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870429 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-log-socket" (OuterVolumeSpecName: "log-socket") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870474 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.870780 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.884150 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.891305 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/362420fc-42a2-444d-b450-49ff1c0eb5c2-kube-api-access-kw8cm" (OuterVolumeSpecName: "kube-api-access-kw8cm") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "kube-api-access-kw8cm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.892475 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "362420fc-42a2-444d-b450-49ff1c0eb5c2" (UID: "362420fc-42a2-444d-b450-49ff1c0eb5c2"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.970713 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-systemd\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.970793 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-ovnkube-config\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.970818 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-log-socket\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.970843 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-ovn\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.970870 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.970967 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-ovnkube-script-lib\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971002 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-run-netns\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971032 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-env-overrides\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971100 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-cni-netd\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 
12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971134 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjkbx\" (UniqueName: \"kubernetes.io/projected/cf97ec3d-0481-4795-a990-158c5d534234-kube-api-access-hjkbx\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971171 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-systemd-units\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971192 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-cni-bin\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971237 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-kubelet\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971282 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-var-lib-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971310 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971332 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-run-ovn-kubernetes\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971359 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-slash\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971378 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cf97ec3d-0481-4795-a990-158c5d534234-ovn-node-metrics-cert\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971405 4591 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-etc-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971422 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-node-log\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971500 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-cni-netd\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971552 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-var-lib-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971754 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972437 4591 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972473 4591 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971818 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971827 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-systemd\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972494 4591 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-slash\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972507 4591 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-env-overrides\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971844 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-run-ovn\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971855 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-etc-openvswitch\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971869 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-node-log\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971865 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-run-netns\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972587 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-ovnkube-script-lib\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972402 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-log-socket\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971800 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-systemd-units\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971794 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-run-ovn-kubernetes\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971796 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-slash\") pod \"ovnkube-node-54dt6\" (UID: 
\"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972514 4591 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972696 4591 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-node-log\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972705 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cf97ec3d-0481-4795-a990-158c5d534234-ovnkube-config\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972709 4591 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972765 4591 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972777 4591 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972788 4591 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972798 4591 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972811 4591 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972820 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kw8cm\" (UniqueName: \"kubernetes.io/projected/362420fc-42a2-444d-b450-49ff1c0eb5c2-kube-api-access-kw8cm\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971840 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-kubelet\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972834 4591 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-systemd-units\") 
on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972864 4591 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972878 4591 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972902 4591 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972912 4591 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972920 4591 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/362420fc-42a2-444d-b450-49ff1c0eb5c2-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972931 4591 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-log-socket\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.972941 4591 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/362420fc-42a2-444d-b450-49ff1c0eb5c2-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.971906 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cf97ec3d-0481-4795-a990-158c5d534234-host-cni-bin\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:10 crc kubenswrapper[4591]: I1203 12:14:10.977317 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cf97ec3d-0481-4795-a990-158c5d534234-ovn-node-metrics-cert\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.006511 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjkbx\" (UniqueName: \"kubernetes.io/projected/cf97ec3d-0481-4795-a990-158c5d534234-kube-api-access-hjkbx\") pod \"ovnkube-node-54dt6\" (UID: \"cf97ec3d-0481-4795-a990-158c5d534234\") " pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.161447 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.263926 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovn-acl-logging/0.log" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.264580 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-k4dxv_362420fc-42a2-444d-b450-49ff1c0eb5c2/ovn-controller/0.log" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.264916 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c" exitCode=0 Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.264947 4591 generic.go:334] "Generic (PLEG): container finished" podID="362420fc-42a2-444d-b450-49ff1c0eb5c2" containerID="b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd" exitCode=0 Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.265001 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c"} Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.265082 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd"} Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.265098 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" event={"ID":"362420fc-42a2-444d-b450-49ff1c0eb5c2","Type":"ContainerDied","Data":"41db4860240a93056fae4c94ea19e239c01c3c8dccb5ca63d44263f59525aa08"} Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.265131 4591 scope.go:117] "RemoveContainer" containerID="d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.265313 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-k4dxv" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.267160 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/2.log" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.268324 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"44a588d672d13e5be363df9c5643355a3ac6451bb93698ce431f1391083e6b81"} Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.279768 4591 scope.go:117] "RemoveContainer" containerID="93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.297899 4591 scope.go:117] "RemoveContainer" containerID="4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.341601 4591 scope.go:117] "RemoveContainer" containerID="8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.349263 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-k4dxv"] Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.360755 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-k4dxv"] Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.397842 4591 scope.go:117] "RemoveContainer" containerID="bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.412550 4591 scope.go:117] "RemoveContainer" containerID="b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.427357 4591 scope.go:117] "RemoveContainer" containerID="19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.445014 4591 scope.go:117] "RemoveContainer" containerID="c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.473491 4591 scope.go:117] "RemoveContainer" containerID="da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.490233 4591 scope.go:117] "RemoveContainer" containerID="d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.490773 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e\": container with ID starting with d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e not found: ID does not exist" containerID="d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.490830 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e"} err="failed to get container status \"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e\": rpc error: code = NotFound desc = could not find container \"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e\": container with ID starting with 
d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.490861 4591 scope.go:117] "RemoveContainer" containerID="93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.491871 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\": container with ID starting with 93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8 not found: ID does not exist" containerID="93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.491912 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8"} err="failed to get container status \"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\": rpc error: code = NotFound desc = could not find container \"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\": container with ID starting with 93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8 not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.491937 4591 scope.go:117] "RemoveContainer" containerID="4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.492140 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\": container with ID starting with 4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e not found: ID does not exist" containerID="4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.492163 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e"} err="failed to get container status \"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\": rpc error: code = NotFound desc = could not find container \"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\": container with ID starting with 4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.492176 4591 scope.go:117] "RemoveContainer" containerID="8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.495539 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\": container with ID starting with 8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595 not found: ID does not exist" containerID="8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.495564 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595"} err="failed to get container status \"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\": rpc 
error: code = NotFound desc = could not find container \"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\": container with ID starting with 8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595 not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.495578 4591 scope.go:117] "RemoveContainer" containerID="bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.495866 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\": container with ID starting with bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c not found: ID does not exist" containerID="bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.495901 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c"} err="failed to get container status \"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\": rpc error: code = NotFound desc = could not find container \"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\": container with ID starting with bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.495916 4591 scope.go:117] "RemoveContainer" containerID="b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.496286 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\": container with ID starting with b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd not found: ID does not exist" containerID="b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.496327 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd"} err="failed to get container status \"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\": rpc error: code = NotFound desc = could not find container \"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\": container with ID starting with b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.496348 4591 scope.go:117] "RemoveContainer" containerID="19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.496585 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\": container with ID starting with 19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e not found: ID does not exist" containerID="19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.496607 4591 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e"} err="failed to get container status \"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\": rpc error: code = NotFound desc = could not find container \"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\": container with ID starting with 19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.496620 4591 scope.go:117] "RemoveContainer" containerID="c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.497404 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\": container with ID starting with c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d not found: ID does not exist" containerID="c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.497473 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d"} err="failed to get container status \"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\": rpc error: code = NotFound desc = could not find container \"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\": container with ID starting with c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.497490 4591 scope.go:117] "RemoveContainer" containerID="da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f" Dec 03 12:14:11 crc kubenswrapper[4591]: E1203 12:14:11.497743 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\": container with ID starting with da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f not found: ID does not exist" containerID="da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.497771 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f"} err="failed to get container status \"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\": rpc error: code = NotFound desc = could not find container \"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\": container with ID starting with da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.497787 4591 scope.go:117] "RemoveContainer" containerID="d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.498012 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e"} err="failed to get container status \"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e\": rpc error: code = NotFound desc = could not find container 
\"d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e\": container with ID starting with d0591df2f5ad6988cc47d792478bfb0275bb3dcf82e63ed243a6c7f598f5f65e not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.498048 4591 scope.go:117] "RemoveContainer" containerID="93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.498372 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8"} err="failed to get container status \"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\": rpc error: code = NotFound desc = could not find container \"93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8\": container with ID starting with 93abb6954af5e7a222a5569d70ea58a9b3fe75db52438171d070ecc284b706a8 not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.498395 4591 scope.go:117] "RemoveContainer" containerID="4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.498922 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e"} err="failed to get container status \"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\": rpc error: code = NotFound desc = could not find container \"4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e\": container with ID starting with 4a923aff47021271110420a8eb9cd88dcee037b94f8ba840d017e46afd237d1e not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.498944 4591 scope.go:117] "RemoveContainer" containerID="8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.499394 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595"} err="failed to get container status \"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\": rpc error: code = NotFound desc = could not find container \"8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595\": container with ID starting with 8e91c2e5e949a98f1c676d6bdc648f34faceaa2bf50389fb5b15d052eeed4595 not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.499443 4591 scope.go:117] "RemoveContainer" containerID="bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.499848 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c"} err="failed to get container status \"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\": rpc error: code = NotFound desc = could not find container \"bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c\": container with ID starting with bc93fadff2685fba19b9002c0704d8cbe308b8bf62df24427a32eda607f91a8c not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.499898 4591 scope.go:117] "RemoveContainer" containerID="b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.500195 4591 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd"} err="failed to get container status \"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\": rpc error: code = NotFound desc = could not find container \"b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd\": container with ID starting with b3cb23ae0edeb428a834bb7f9d92d839d23776c7998ddd5a69620a5530e667cd not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.500220 4591 scope.go:117] "RemoveContainer" containerID="19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.500583 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e"} err="failed to get container status \"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\": rpc error: code = NotFound desc = could not find container \"19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e\": container with ID starting with 19987a84fa03fadae1b464b2a1690ed180fb1b87c458dfbbf2d0ccbcaa98534e not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.500606 4591 scope.go:117] "RemoveContainer" containerID="c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.500965 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d"} err="failed to get container status \"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\": rpc error: code = NotFound desc = could not find container \"c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d\": container with ID starting with c9f9b43e09c36b0def3cd5c2916d4372f69795d0a7a2e70f72ad9e7acab9b34d not found: ID does not exist" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.500984 4591 scope.go:117] "RemoveContainer" containerID="da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f" Dec 03 12:14:11 crc kubenswrapper[4591]: I1203 12:14:11.501342 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f"} err="failed to get container status \"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\": rpc error: code = NotFound desc = could not find container \"da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f\": container with ID starting with da8da152552d70602e9c039a10e428b0e89c783145da338440589e2c11ec1e5f not found: ID does not exist" Dec 03 12:14:12 crc kubenswrapper[4591]: I1203 12:14:12.275384 4591 generic.go:334] "Generic (PLEG): container finished" podID="cf97ec3d-0481-4795-a990-158c5d534234" containerID="9bc48d876de53f61422e7d23b36f766b6945d424b569c58cf070681e082b6931" exitCode=0 Dec 03 12:14:12 crc kubenswrapper[4591]: I1203 12:14:12.275447 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerDied","Data":"9bc48d876de53f61422e7d23b36f766b6945d424b569c58cf070681e082b6931"} Dec 03 12:14:12 crc kubenswrapper[4591]: I1203 12:14:12.897745 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="362420fc-42a2-444d-b450-49ff1c0eb5c2" path="/var/lib/kubelet/pods/362420fc-42a2-444d-b450-49ff1c0eb5c2/volumes" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.288430 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"18442b5a7c17d5bf5f98118dc52771b2a551407fad3e05832e9329efffe50682"} Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.288481 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"8a65a3bcab8d47ccfccf6866c3df8d78f6f60a8f3aaec432649dc80f440e54cb"} Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.288493 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"ae3c6d1fcb611bb4bf5235bc7c5e8db77727480a1732ac85f54b338aa5387c58"} Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.288503 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"7285e6df03dbf7977b7c2d9e86b479ad7820eede95d5691d4f508301d0afaf90"} Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.288513 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"0851cf6e99e7d8704efd2cc17d47e8fa739b4c6c9a5ee839d66382e73fe02e17"} Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.288522 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"5bd9ee841f42c21fb6bb17d46df99d5de54a8324e6884ce1215a52ccdb7c81f5"} Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.800710 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg"] Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.801783 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.804977 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.805401 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.805627 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-7b7nz" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.817760 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgt5r\" (UniqueName: \"kubernetes.io/projected/9ddbe8c9-be6f-4bae-ac8e-18800b197f3b-kube-api-access-pgt5r\") pod \"obo-prometheus-operator-668cf9dfbb-c4cdg\" (UID: \"9ddbe8c9-be6f-4bae-ac8e-18800b197f3b\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.918709 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgt5r\" (UniqueName: \"kubernetes.io/projected/9ddbe8c9-be6f-4bae-ac8e-18800b197f3b-kube-api-access-pgt5r\") pod \"obo-prometheus-operator-668cf9dfbb-c4cdg\" (UID: \"9ddbe8c9-be6f-4bae-ac8e-18800b197f3b\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.923683 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w"] Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.924597 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.926010 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-gwpn4" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.926657 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.933974 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg"] Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.934807 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:13 crc kubenswrapper[4591]: I1203 12:14:13.943811 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgt5r\" (UniqueName: \"kubernetes.io/projected/9ddbe8c9-be6f-4bae-ac8e-18800b197f3b-kube-api-access-pgt5r\") pod \"obo-prometheus-operator-668cf9dfbb-c4cdg\" (UID: \"9ddbe8c9-be6f-4bae-ac8e-18800b197f3b\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.021430 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4a7721a3-0074-41f9-b794-6f739872fccf-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w\" (UID: \"4a7721a3-0074-41f9-b794-6f739872fccf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.021767 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ca94d274-d7be-4190-9703-65e47e03b0a5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg\" (UID: \"ca94d274-d7be-4190-9703-65e47e03b0a5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.021806 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ca94d274-d7be-4190-9703-65e47e03b0a5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg\" (UID: \"ca94d274-d7be-4190-9703-65e47e03b0a5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.021979 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4a7721a3-0074-41f9-b794-6f739872fccf-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w\" (UID: \"4a7721a3-0074-41f9-b794-6f739872fccf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.068577 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-rdtt7"] Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.069503 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.071593 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.072809 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-vhfxf" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.120732 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.123760 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ca94d274-d7be-4190-9703-65e47e03b0a5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg\" (UID: \"ca94d274-d7be-4190-9703-65e47e03b0a5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.123800 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-rdtt7\" (UID: \"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a\") " pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.123836 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ca94d274-d7be-4190-9703-65e47e03b0a5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg\" (UID: \"ca94d274-d7be-4190-9703-65e47e03b0a5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.123882 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9zpp\" (UniqueName: \"kubernetes.io/projected/98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a-kube-api-access-m9zpp\") pod \"observability-operator-d8bb48f5d-rdtt7\" (UID: \"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a\") " pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.123976 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4a7721a3-0074-41f9-b794-6f739872fccf-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w\" (UID: \"4a7721a3-0074-41f9-b794-6f739872fccf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.124043 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4a7721a3-0074-41f9-b794-6f739872fccf-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w\" (UID: \"4a7721a3-0074-41f9-b794-6f739872fccf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.133669 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4a7721a3-0074-41f9-b794-6f739872fccf-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w\" (UID: \"4a7721a3-0074-41f9-b794-6f739872fccf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.136538 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ca94d274-d7be-4190-9703-65e47e03b0a5-webhook-cert\") pod 
\"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg\" (UID: \"ca94d274-d7be-4190-9703-65e47e03b0a5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.137950 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4a7721a3-0074-41f9-b794-6f739872fccf-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w\" (UID: \"4a7721a3-0074-41f9-b794-6f739872fccf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.145445 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ca94d274-d7be-4190-9703-65e47e03b0a5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg\" (UID: \"ca94d274-d7be-4190-9703-65e47e03b0a5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.158677 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(6a2cee4640f21c749b3427377af007ee05e39ce046df73913968386d6f5a7f4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.158816 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(6a2cee4640f21c749b3427377af007ee05e39ce046df73913968386d6f5a7f4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.158846 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(6a2cee4640f21c749b3427377af007ee05e39ce046df73913968386d6f5a7f4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.158923 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators(9ddbe8c9-be6f-4bae-ac8e-18800b197f3b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators(9ddbe8c9-be6f-4bae-ac8e-18800b197f3b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(6a2cee4640f21c749b3427377af007ee05e39ce046df73913968386d6f5a7f4c): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" podUID="9ddbe8c9-be6f-4bae-ac8e-18800b197f3b" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.225518 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-rdtt7\" (UID: \"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a\") " pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.225603 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9zpp\" (UniqueName: \"kubernetes.io/projected/98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a-kube-api-access-m9zpp\") pod \"observability-operator-d8bb48f5d-rdtt7\" (UID: \"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a\") " pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.227887 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-b7b9r"] Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.229621 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.229851 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-rdtt7\" (UID: \"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a\") " pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.234204 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-8z77f" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.240918 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.242187 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9zpp\" (UniqueName: \"kubernetes.io/projected/98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a-kube-api-access-m9zpp\") pod \"observability-operator-d8bb48f5d-rdtt7\" (UID: \"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a\") " pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.262640 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(e4566193ffabb1417850fb4657cf1e3b23c07bfd6165b275c3ecca0dc052529f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.262760 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(e4566193ffabb1417850fb4657cf1e3b23c07bfd6165b275c3ecca0dc052529f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.262850 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(e4566193ffabb1417850fb4657cf1e3b23c07bfd6165b275c3ecca0dc052529f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.262948 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators(4a7721a3-0074-41f9-b794-6f739872fccf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators(4a7721a3-0074-41f9-b794-6f739872fccf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(e4566193ffabb1417850fb4657cf1e3b23c07bfd6165b275c3ecca0dc052529f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" podUID="4a7721a3-0074-41f9-b794-6f739872fccf" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.265499 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.288618 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(8de3087853cc993881943b587b170a00e87797330d43497441a88fd236dd91c7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.288679 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(8de3087853cc993881943b587b170a00e87797330d43497441a88fd236dd91c7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.288704 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(8de3087853cc993881943b587b170a00e87797330d43497441a88fd236dd91c7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.288746 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators(ca94d274-d7be-4190-9703-65e47e03b0a5)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators(ca94d274-d7be-4190-9703-65e47e03b0a5)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(8de3087853cc993881943b587b170a00e87797330d43497441a88fd236dd91c7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" podUID="ca94d274-d7be-4190-9703-65e47e03b0a5" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.326798 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c9360e0-cdca-4f3b-895b-27de95ec9f40-openshift-service-ca\") pod \"perses-operator-5446b9c989-b7b9r\" (UID: \"2c9360e0-cdca-4f3b-895b-27de95ec9f40\") " pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.326886 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7tjj\" (UniqueName: \"kubernetes.io/projected/2c9360e0-cdca-4f3b-895b-27de95ec9f40-kube-api-access-f7tjj\") pod \"perses-operator-5446b9c989-b7b9r\" (UID: \"2c9360e0-cdca-4f3b-895b-27de95ec9f40\") " pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.383861 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.418533 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(5a903b683c021f2f3ddbf7d64a6fb50ac12e2e9648ffeba998ae435cc15d15ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.418599 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(5a903b683c021f2f3ddbf7d64a6fb50ac12e2e9648ffeba998ae435cc15d15ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.418624 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(5a903b683c021f2f3ddbf7d64a6fb50ac12e2e9648ffeba998ae435cc15d15ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.418679 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-rdtt7_openshift-operators(98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-rdtt7_openshift-operators(98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(5a903b683c021f2f3ddbf7d64a6fb50ac12e2e9648ffeba998ae435cc15d15ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" podUID="98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.429038 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c9360e0-cdca-4f3b-895b-27de95ec9f40-openshift-service-ca\") pod \"perses-operator-5446b9c989-b7b9r\" (UID: \"2c9360e0-cdca-4f3b-895b-27de95ec9f40\") " pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.429173 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7tjj\" (UniqueName: \"kubernetes.io/projected/2c9360e0-cdca-4f3b-895b-27de95ec9f40-kube-api-access-f7tjj\") pod \"perses-operator-5446b9c989-b7b9r\" (UID: \"2c9360e0-cdca-4f3b-895b-27de95ec9f40\") " pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.429992 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/2c9360e0-cdca-4f3b-895b-27de95ec9f40-openshift-service-ca\") pod \"perses-operator-5446b9c989-b7b9r\" (UID: \"2c9360e0-cdca-4f3b-895b-27de95ec9f40\") " pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.447279 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7tjj\" (UniqueName: \"kubernetes.io/projected/2c9360e0-cdca-4f3b-895b-27de95ec9f40-kube-api-access-f7tjj\") pod \"perses-operator-5446b9c989-b7b9r\" (UID: \"2c9360e0-cdca-4f3b-895b-27de95ec9f40\") " pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: I1203 12:14:14.554139 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.575718 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(023782a38f554c86d5b5937c1a9a1a27d88b0a5750e9c962e73d39a6f0009c27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.575866 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(023782a38f554c86d5b5937c1a9a1a27d88b0a5750e9c962e73d39a6f0009c27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.575947 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(023782a38f554c86d5b5937c1a9a1a27d88b0a5750e9c962e73d39a6f0009c27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:14 crc kubenswrapper[4591]: E1203 12:14:14.576050 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-b7b9r_openshift-operators(2c9360e0-cdca-4f3b-895b-27de95ec9f40)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-b7b9r_openshift-operators(2c9360e0-cdca-4f3b-895b-27de95ec9f40)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(023782a38f554c86d5b5937c1a9a1a27d88b0a5750e9c962e73d39a6f0009c27): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" podUID="2c9360e0-cdca-4f3b-895b-27de95ec9f40" Dec 03 12:14:15 crc kubenswrapper[4591]: I1203 12:14:15.303890 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"160dd22d0d97106a92f09c5ddf705f33d6842438c8b218a6fccbfdb02e362e4f"} Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.098750 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w"] Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.099416 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.099881 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.102412 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-rdtt7"] Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.102537 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.102961 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.111040 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg"] Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.111179 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.111449 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.142569 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg"] Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.142640 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.143234 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.181347 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-b7b9r"] Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.181426 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.181672 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217279 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(3de84df5ae1b0ac2ed55802b2d166c6141507b87edf47fbb950537a390712317): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217362 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(3de84df5ae1b0ac2ed55802b2d166c6141507b87edf47fbb950537a390712317): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217391 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(3de84df5ae1b0ac2ed55802b2d166c6141507b87edf47fbb950537a390712317): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217447 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators(4a7721a3-0074-41f9-b794-6f739872fccf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators(4a7721a3-0074-41f9-b794-6f739872fccf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(3de84df5ae1b0ac2ed55802b2d166c6141507b87edf47fbb950537a390712317): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" podUID="4a7721a3-0074-41f9-b794-6f739872fccf" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217734 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(f4d5c870439a471d1389fbca20b6af9386ec3873b03f0c1f83b38c713eeed675): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217755 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(f4d5c870439a471d1389fbca20b6af9386ec3873b03f0c1f83b38c713eeed675): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217771 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(f4d5c870439a471d1389fbca20b6af9386ec3873b03f0c1f83b38c713eeed675): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.217798 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-rdtt7_openshift-operators(98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-rdtt7_openshift-operators(98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(f4d5c870439a471d1389fbca20b6af9386ec3873b03f0c1f83b38c713eeed675): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" podUID="98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.266607 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(2ffa787e9785092899f18bc23225bd841bbe60d2338aaa9a25f6b17094797fce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.266676 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(2ffa787e9785092899f18bc23225bd841bbe60d2338aaa9a25f6b17094797fce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.266699 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(2ffa787e9785092899f18bc23225bd841bbe60d2338aaa9a25f6b17094797fce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.266751 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators(ca94d274-d7be-4190-9703-65e47e03b0a5)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators(ca94d274-d7be-4190-9703-65e47e03b0a5)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(2ffa787e9785092899f18bc23225bd841bbe60d2338aaa9a25f6b17094797fce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" podUID="ca94d274-d7be-4190-9703-65e47e03b0a5" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.281661 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(f040d94a00982dd030fcc5bc5b04cdef8e2d6fdf54776263f79505def9d2583a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.281730 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(f040d94a00982dd030fcc5bc5b04cdef8e2d6fdf54776263f79505def9d2583a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.281749 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(f040d94a00982dd030fcc5bc5b04cdef8e2d6fdf54776263f79505def9d2583a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.281795 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-b7b9r_openshift-operators(2c9360e0-cdca-4f3b-895b-27de95ec9f40)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-b7b9r_openshift-operators(2c9360e0-cdca-4f3b-895b-27de95ec9f40)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(f040d94a00982dd030fcc5bc5b04cdef8e2d6fdf54776263f79505def9d2583a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" podUID="2c9360e0-cdca-4f3b-895b-27de95ec9f40" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.296551 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(edcab860aade6f56a4bfad0bb34c7df46867126fbb2c418abb114d83ae8a9953): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.296616 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(edcab860aade6f56a4bfad0bb34c7df46867126fbb2c418abb114d83ae8a9953): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.296642 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(edcab860aade6f56a4bfad0bb34c7df46867126fbb2c418abb114d83ae8a9953): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:18 crc kubenswrapper[4591]: E1203 12:14:18.296692 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators(9ddbe8c9-be6f-4bae-ac8e-18800b197f3b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators(9ddbe8c9-be6f-4bae-ac8e-18800b197f3b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(edcab860aade6f56a4bfad0bb34c7df46867126fbb2c418abb114d83ae8a9953): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" podUID="9ddbe8c9-be6f-4bae-ac8e-18800b197f3b" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.325964 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" event={"ID":"cf97ec3d-0481-4795-a990-158c5d534234","Type":"ContainerStarted","Data":"1ff979a3ca1dec06c7074f1e8dfda5e6299c4f4c615e72df8426cc25f981298b"} Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.327557 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.327592 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.327641 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.356808 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" podStartSLOduration=8.356797238 podStartE2EDuration="8.356797238s" podCreationTimestamp="2025-12-03 12:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:14:18.353579069 +0000 UTC m=+555.780618830" watchObservedRunningTime="2025-12-03 12:14:18.356797238 +0000 UTC m=+555.783837008" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.360925 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:18 crc kubenswrapper[4591]: I1203 12:14:18.362536 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:22 crc kubenswrapper[4591]: I1203 12:14:22.893192 4591 scope.go:117] "RemoveContainer" containerID="c8b78dd322bb74ab016f9c66c2bbad1989573a75101d93eda813a266b14a5b2e" Dec 03 12:14:22 crc kubenswrapper[4591]: E1203 12:14:22.893776 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-2qprr_openshift-multus(19d5b224-0f8a-49a3-84f4-f2c0ef74fda4)\"" pod="openshift-multus/multus-2qprr" podUID="19d5b224-0f8a-49a3-84f4-f2c0ef74fda4" Dec 03 12:14:25 crc kubenswrapper[4591]: I1203 12:14:25.300050 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:14:25 crc kubenswrapper[4591]: I1203 12:14:25.300128 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:14:25 crc kubenswrapper[4591]: I1203 12:14:25.300173 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:14:25 crc kubenswrapper[4591]: I1203 
12:14:25.300590 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f1e201b35adc4b2f415dc45c61260eb3c7549edaa83833a201b4a8f697036247"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:14:25 crc kubenswrapper[4591]: I1203 12:14:25.300647 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://f1e201b35adc4b2f415dc45c61260eb3c7549edaa83833a201b4a8f697036247" gracePeriod=600 Dec 03 12:14:26 crc kubenswrapper[4591]: I1203 12:14:26.374365 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="f1e201b35adc4b2f415dc45c61260eb3c7549edaa83833a201b4a8f697036247" exitCode=0 Dec 03 12:14:26 crc kubenswrapper[4591]: I1203 12:14:26.374711 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"f1e201b35adc4b2f415dc45c61260eb3c7549edaa83833a201b4a8f697036247"} Dec 03 12:14:26 crc kubenswrapper[4591]: I1203 12:14:26.374749 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"d3f2548089882317f55d570c4b96ca8c02b125668122d2d842c90e0bdabc40c7"} Dec 03 12:14:26 crc kubenswrapper[4591]: I1203 12:14:26.374769 4591 scope.go:117] "RemoveContainer" containerID="5b48d026487ee4ed01e0e2bfbf7bb4dc4d646df3eb7c48ee2b7d086035594359" Dec 03 12:14:30 crc kubenswrapper[4591]: I1203 12:14:30.889850 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:30 crc kubenswrapper[4591]: I1203 12:14:30.890018 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:30 crc kubenswrapper[4591]: I1203 12:14:30.890791 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:30 crc kubenswrapper[4591]: I1203 12:14:30.890828 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936191 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(62bcf01167f5a267f5dfd462f5be35cf47ed5a14e2e6c2696075ad79c9749788): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936412 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(62bcf01167f5a267f5dfd462f5be35cf47ed5a14e2e6c2696075ad79c9749788): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936440 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(62bcf01167f5a267f5dfd462f5be35cf47ed5a14e2e6c2696075ad79c9749788): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936197 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(d47ccfbc14be073c7f919c28f732e86915a94d0b78670df8482199cc4e7a04ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936491 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-b7b9r_openshift-operators(2c9360e0-cdca-4f3b-895b-27de95ec9f40)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-b7b9r_openshift-operators(2c9360e0-cdca-4f3b-895b-27de95ec9f40)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-b7b9r_openshift-operators_2c9360e0-cdca-4f3b-895b-27de95ec9f40_0(62bcf01167f5a267f5dfd462f5be35cf47ed5a14e2e6c2696075ad79c9749788): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" podUID="2c9360e0-cdca-4f3b-895b-27de95ec9f40" Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936510 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(d47ccfbc14be073c7f919c28f732e86915a94d0b78670df8482199cc4e7a04ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936540 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(d47ccfbc14be073c7f919c28f732e86915a94d0b78670df8482199cc4e7a04ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:30 crc kubenswrapper[4591]: E1203 12:14:30.936607 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators(4a7721a3-0074-41f9-b794-6f739872fccf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators(4a7721a3-0074-41f9-b794-6f739872fccf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_openshift-operators_4a7721a3-0074-41f9-b794-6f739872fccf_0(d47ccfbc14be073c7f919c28f732e86915a94d0b78670df8482199cc4e7a04ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" podUID="4a7721a3-0074-41f9-b794-6f739872fccf" Dec 03 12:14:32 crc kubenswrapper[4591]: I1203 12:14:32.890208 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:32 crc kubenswrapper[4591]: I1203 12:14:32.890280 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:32 crc kubenswrapper[4591]: I1203 12:14:32.890298 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:32 crc kubenswrapper[4591]: I1203 12:14:32.893608 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:32 crc kubenswrapper[4591]: I1203 12:14:32.893664 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:32 crc kubenswrapper[4591]: I1203 12:14:32.893665 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.932800 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(337f05def458bde521fc7d37d7376b9478fc8026b70ff9f08809a29d49227b31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.932870 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(337f05def458bde521fc7d37d7376b9478fc8026b70ff9f08809a29d49227b31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.932897 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(337f05def458bde521fc7d37d7376b9478fc8026b70ff9f08809a29d49227b31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.932960 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators(ca94d274-d7be-4190-9703-65e47e03b0a5)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators(ca94d274-d7be-4190-9703-65e47e03b0a5)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_openshift-operators_ca94d274-d7be-4190-9703-65e47e03b0a5_0(337f05def458bde521fc7d37d7376b9478fc8026b70ff9f08809a29d49227b31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" podUID="ca94d274-d7be-4190-9703-65e47e03b0a5" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.952998 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(e6219fb8f2cb2751bf7fd7fc2cc27ed210f065f37eccea9bf90d69f8bbf94fd5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.953054 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(e6219fb8f2cb2751bf7fd7fc2cc27ed210f065f37eccea9bf90d69f8bbf94fd5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.953092 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(e6219fb8f2cb2751bf7fd7fc2cc27ed210f065f37eccea9bf90d69f8bbf94fd5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.953140 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-rdtt7_openshift-operators(98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-rdtt7_openshift-operators(98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-rdtt7_openshift-operators_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a_0(e6219fb8f2cb2751bf7fd7fc2cc27ed210f065f37eccea9bf90d69f8bbf94fd5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" podUID="98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.957899 4591 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(92b49e43a1c144dd3335654ae01ae3c9859b5b8eb2d12a839bf42ec7e1f150d1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.957959 4591 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(92b49e43a1c144dd3335654ae01ae3c9859b5b8eb2d12a839bf42ec7e1f150d1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.958022 4591 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(92b49e43a1c144dd3335654ae01ae3c9859b5b8eb2d12a839bf42ec7e1f150d1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:32 crc kubenswrapper[4591]: E1203 12:14:32.958091 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators(9ddbe8c9-be6f-4bae-ac8e-18800b197f3b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators(9ddbe8c9-be6f-4bae-ac8e-18800b197f3b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-c4cdg_openshift-operators_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b_0(92b49e43a1c144dd3335654ae01ae3c9859b5b8eb2d12a839bf42ec7e1f150d1): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" podUID="9ddbe8c9-be6f-4bae-ac8e-18800b197f3b" Dec 03 12:14:35 crc kubenswrapper[4591]: I1203 12:14:35.890866 4591 scope.go:117] "RemoveContainer" containerID="c8b78dd322bb74ab016f9c66c2bbad1989573a75101d93eda813a266b14a5b2e" Dec 03 12:14:36 crc kubenswrapper[4591]: I1203 12:14:36.432842 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2qprr_19d5b224-0f8a-49a3-84f4-f2c0ef74fda4/kube-multus/2.log" Dec 03 12:14:36 crc kubenswrapper[4591]: I1203 12:14:36.433212 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2qprr" event={"ID":"19d5b224-0f8a-49a3-84f4-f2c0ef74fda4","Type":"ContainerStarted","Data":"2cc8c14f04b8da409ca86a8e36ce1dca827ef52efaf0fe4b080e1da9311687a7"} Dec 03 12:14:41 crc kubenswrapper[4591]: I1203 12:14:41.184596 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-54dt6" Dec 03 12:14:43 crc kubenswrapper[4591]: I1203 12:14:43.890451 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:43 crc kubenswrapper[4591]: I1203 12:14:43.890451 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:43 crc kubenswrapper[4591]: I1203 12:14:43.890540 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:43 crc kubenswrapper[4591]: I1203 12:14:43.891471 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:43 crc kubenswrapper[4591]: I1203 12:14:43.891760 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" Dec 03 12:14:43 crc kubenswrapper[4591]: I1203 12:14:43.891806 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:44 crc kubenswrapper[4591]: I1203 12:14:44.230530 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w"] Dec 03 12:14:44 crc kubenswrapper[4591]: I1203 12:14:44.280414 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-rdtt7"] Dec 03 12:14:44 crc kubenswrapper[4591]: W1203 12:14:44.284034 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f8ce4f_b88f_4051_89c7_7c2dbfbcbe2a.slice/crio-b6fa046258ef232568ea548fc2e98fdfe6674b7b257ed398bf5939c88155c387 WatchSource:0}: Error finding container b6fa046258ef232568ea548fc2e98fdfe6674b7b257ed398bf5939c88155c387: Status 404 returned error can't find the container with id b6fa046258ef232568ea548fc2e98fdfe6674b7b257ed398bf5939c88155c387 Dec 03 12:14:44 crc kubenswrapper[4591]: I1203 12:14:44.331534 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-b7b9r"] Dec 03 12:14:44 crc kubenswrapper[4591]: W1203 12:14:44.335206 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c9360e0_cdca_4f3b_895b_27de95ec9f40.slice/crio-8ae6a639ff474acd44713f5b88bf4d6b20f0151e6ae8d63c2be7c666b78de3a9 WatchSource:0}: Error finding container 8ae6a639ff474acd44713f5b88bf4d6b20f0151e6ae8d63c2be7c666b78de3a9: Status 404 returned error can't find the container with id 8ae6a639ff474acd44713f5b88bf4d6b20f0151e6ae8d63c2be7c666b78de3a9 Dec 03 12:14:44 crc kubenswrapper[4591]: I1203 12:14:44.500609 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" event={"ID":"2c9360e0-cdca-4f3b-895b-27de95ec9f40","Type":"ContainerStarted","Data":"8ae6a639ff474acd44713f5b88bf4d6b20f0151e6ae8d63c2be7c666b78de3a9"} Dec 03 12:14:44 crc kubenswrapper[4591]: I1203 12:14:44.502284 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" event={"ID":"4a7721a3-0074-41f9-b794-6f739872fccf","Type":"ContainerStarted","Data":"a92cec92a8b6fcf665717b04ebde69b00a108a5e8ab50779ed643c491dac98db"} Dec 03 12:14:44 crc kubenswrapper[4591]: I1203 12:14:44.504993 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" event={"ID":"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a","Type":"ContainerStarted","Data":"b6fa046258ef232568ea548fc2e98fdfe6674b7b257ed398bf5939c88155c387"} Dec 03 12:14:45 crc kubenswrapper[4591]: I1203 12:14:45.890815 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:45 crc kubenswrapper[4591]: I1203 12:14:45.891571 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" Dec 03 12:14:46 crc kubenswrapper[4591]: I1203 12:14:46.102472 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg"] Dec 03 12:14:46 crc kubenswrapper[4591]: I1203 12:14:46.519416 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" event={"ID":"ca94d274-d7be-4190-9703-65e47e03b0a5","Type":"ContainerStarted","Data":"9b90f4d5d7f111a4f6413129f677dd66a869fb65d866a99a32e3a5617bca6ce3"} Dec 03 12:14:47 crc kubenswrapper[4591]: I1203 12:14:47.889846 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:47 crc kubenswrapper[4591]: I1203 12:14:47.890465 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" Dec 03 12:14:48 crc kubenswrapper[4591]: I1203 12:14:48.072925 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg"] Dec 03 12:14:48 crc kubenswrapper[4591]: W1203 12:14:48.087738 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ddbe8c9_be6f_4bae_ac8e_18800b197f3b.slice/crio-ba7f093f628234a771480e24410b0cc9d86651ebdab401d331558eca9bbf5b74 WatchSource:0}: Error finding container ba7f093f628234a771480e24410b0cc9d86651ebdab401d331558eca9bbf5b74: Status 404 returned error can't find the container with id ba7f093f628234a771480e24410b0cc9d86651ebdab401d331558eca9bbf5b74 Dec 03 12:14:48 crc kubenswrapper[4591]: I1203 12:14:48.534647 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" event={"ID":"9ddbe8c9-be6f-4bae-ac8e-18800b197f3b","Type":"ContainerStarted","Data":"ba7f093f628234a771480e24410b0cc9d86651ebdab401d331558eca9bbf5b74"} Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.571268 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" event={"ID":"2c9360e0-cdca-4f3b-895b-27de95ec9f40","Type":"ContainerStarted","Data":"c18de701c02691ef689f3e2372dc5d605ce5b3b2f61f0fbd193d110a3451442d"} Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.571820 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.574195 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" event={"ID":"ca94d274-d7be-4190-9703-65e47e03b0a5","Type":"ContainerStarted","Data":"cccd3a60319b06e19bab2d0709b61d632d2671b017923ad89ded861eb7b3a30d"} Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.576948 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" event={"ID":"4a7721a3-0074-41f9-b794-6f739872fccf","Type":"ContainerStarted","Data":"24ff75c2e8506e8e1880f049a49463688e65de4d2ccb00073405abc8bdf4632e"} Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.578826 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" event={"ID":"98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a","Type":"ContainerStarted","Data":"029f657744c9d4734efde557a3e9b033909007a804ee0b4fc71a13b6de8cb076"} Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.579222 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.587275 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" podStartSLOduration=30.976852401 podStartE2EDuration="39.587264097s" podCreationTimestamp="2025-12-03 12:14:14 +0000 UTC" firstStartedPulling="2025-12-03 12:14:44.338033043 +0000 UTC m=+581.765072813" lastFinishedPulling="2025-12-03 12:14:52.948444739 +0000 UTC m=+590.375484509" observedRunningTime="2025-12-03 12:14:53.582328085 +0000 UTC m=+591.009367856" watchObservedRunningTime="2025-12-03 12:14:53.587264097 +0000 UTC m=+591.014303867" Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.598852 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg" podStartSLOduration=33.781653279 podStartE2EDuration="40.5988423s" podCreationTimestamp="2025-12-03 12:14:13 +0000 UTC" firstStartedPulling="2025-12-03 12:14:46.114010386 +0000 UTC m=+583.541050156" lastFinishedPulling="2025-12-03 12:14:52.931199407 +0000 UTC m=+590.358239177" observedRunningTime="2025-12-03 12:14:53.596032021 +0000 UTC m=+591.023071801" watchObservedRunningTime="2025-12-03 12:14:53.5988423 +0000 UTC m=+591.025882071" Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.606910 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.625244 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-rdtt7" podStartSLOduration=30.944964102 podStartE2EDuration="39.625227978s" podCreationTimestamp="2025-12-03 12:14:14 +0000 UTC" firstStartedPulling="2025-12-03 12:14:44.286304197 +0000 UTC m=+581.713343967" lastFinishedPulling="2025-12-03 12:14:52.966568073 +0000 UTC m=+590.393607843" observedRunningTime="2025-12-03 12:14:53.615237343 +0000 UTC m=+591.042277112" watchObservedRunningTime="2025-12-03 12:14:53.625227978 +0000 UTC m=+591.052267748" Dec 03 12:14:53 crc kubenswrapper[4591]: I1203 12:14:53.641898 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-56547b7db6-nm58w" podStartSLOduration=31.93343674 podStartE2EDuration="40.641882918s" podCreationTimestamp="2025-12-03 12:14:13 +0000 UTC" firstStartedPulling="2025-12-03 12:14:44.239459767 +0000 UTC m=+581.666499537" lastFinishedPulling="2025-12-03 12:14:52.947905945 +0000 UTC m=+590.374945715" observedRunningTime="2025-12-03 12:14:53.631899125 +0000 UTC m=+591.058938896" watchObservedRunningTime="2025-12-03 12:14:53.641882918 +0000 UTC m=+591.068922688" Dec 03 12:14:54 crc kubenswrapper[4591]: I1203 12:14:54.586186 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" 
event={"ID":"9ddbe8c9-be6f-4bae-ac8e-18800b197f3b","Type":"ContainerStarted","Data":"63711b7a8bcff5f3cd8d8f1f699d102b26432b99d3acf331938a59c224e5b006"} Dec 03 12:14:54 crc kubenswrapper[4591]: I1203 12:14:54.600561 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-c4cdg" podStartSLOduration=35.902943746 podStartE2EDuration="41.600540331s" podCreationTimestamp="2025-12-03 12:14:13 +0000 UTC" firstStartedPulling="2025-12-03 12:14:48.092769952 +0000 UTC m=+585.519809721" lastFinishedPulling="2025-12-03 12:14:53.790366536 +0000 UTC m=+591.217406306" observedRunningTime="2025-12-03 12:14:54.598939959 +0000 UTC m=+592.025979729" watchObservedRunningTime="2025-12-03 12:14:54.600540331 +0000 UTC m=+592.027580101" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.074606 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-ncpgb"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.076125 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-ncpgb" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.078271 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.078323 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.078497 4591 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-l6kz9" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.086716 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-wt6ln"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.087327 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.088930 4591 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-58rmw" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.104240 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-h279r"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.106371 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.108677 4591 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-2l4sw" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.117883 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-ncpgb"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.129829 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-h279r"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.142296 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-wt6ln"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.176352 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.177457 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.183916 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.184643 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.189793 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.190120 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wrsp\" (UniqueName: \"kubernetes.io/projected/551b74b5-4ba8-4b29-b23b-fd955fbe0819-kube-api-access-9wrsp\") pod \"cert-manager-cainjector-7f985d654d-wt6ln\" (UID: \"551b74b5-4ba8-4b29-b23b-fd955fbe0819\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.190314 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hjh7\" (UniqueName: \"kubernetes.io/projected/72176880-d1d6-48ed-9099-15650cd27b96-kube-api-access-4hjh7\") pod \"cert-manager-webhook-5655c58dd6-h279r\" (UID: \"72176880-d1d6-48ed-9099-15650cd27b96\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.190448 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xszc\" (UniqueName: \"kubernetes.io/projected/b7cb056f-6d6d-4e52-adcc-10c051df9400-kube-api-access-5xszc\") pod \"cert-manager-5b446d88c5-ncpgb\" (UID: \"b7cb056f-6d6d-4e52-adcc-10c051df9400\") " pod="cert-manager/cert-manager-5b446d88c5-ncpgb" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.292100 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hjh7\" (UniqueName: \"kubernetes.io/projected/72176880-d1d6-48ed-9099-15650cd27b96-kube-api-access-4hjh7\") pod \"cert-manager-webhook-5655c58dd6-h279r\" (UID: \"72176880-d1d6-48ed-9099-15650cd27b96\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" Dec 03 12:15:00 crc 
kubenswrapper[4591]: I1203 12:15:00.292167 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6eeb7840-a41b-4fe5-be30-52b9bd840960-secret-volume\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.292226 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xszc\" (UniqueName: \"kubernetes.io/projected/b7cb056f-6d6d-4e52-adcc-10c051df9400-kube-api-access-5xszc\") pod \"cert-manager-5b446d88c5-ncpgb\" (UID: \"b7cb056f-6d6d-4e52-adcc-10c051df9400\") " pod="cert-manager/cert-manager-5b446d88c5-ncpgb" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.292281 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wrsp\" (UniqueName: \"kubernetes.io/projected/551b74b5-4ba8-4b29-b23b-fd955fbe0819-kube-api-access-9wrsp\") pod \"cert-manager-cainjector-7f985d654d-wt6ln\" (UID: \"551b74b5-4ba8-4b29-b23b-fd955fbe0819\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.292377 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hndjg\" (UniqueName: \"kubernetes.io/projected/6eeb7840-a41b-4fe5-be30-52b9bd840960-kube-api-access-hndjg\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.292398 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6eeb7840-a41b-4fe5-be30-52b9bd840960-config-volume\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.309238 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wrsp\" (UniqueName: \"kubernetes.io/projected/551b74b5-4ba8-4b29-b23b-fd955fbe0819-kube-api-access-9wrsp\") pod \"cert-manager-cainjector-7f985d654d-wt6ln\" (UID: \"551b74b5-4ba8-4b29-b23b-fd955fbe0819\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.310735 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hjh7\" (UniqueName: \"kubernetes.io/projected/72176880-d1d6-48ed-9099-15650cd27b96-kube-api-access-4hjh7\") pod \"cert-manager-webhook-5655c58dd6-h279r\" (UID: \"72176880-d1d6-48ed-9099-15650cd27b96\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.311007 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xszc\" (UniqueName: \"kubernetes.io/projected/b7cb056f-6d6d-4e52-adcc-10c051df9400-kube-api-access-5xszc\") pod \"cert-manager-5b446d88c5-ncpgb\" (UID: \"b7cb056f-6d6d-4e52-adcc-10c051df9400\") " pod="cert-manager/cert-manager-5b446d88c5-ncpgb" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.390977 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-ncpgb" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.394263 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hndjg\" (UniqueName: \"kubernetes.io/projected/6eeb7840-a41b-4fe5-be30-52b9bd840960-kube-api-access-hndjg\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.394377 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6eeb7840-a41b-4fe5-be30-52b9bd840960-config-volume\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.394641 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6eeb7840-a41b-4fe5-be30-52b9bd840960-secret-volume\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.395702 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6eeb7840-a41b-4fe5-be30-52b9bd840960-config-volume\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.398696 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6eeb7840-a41b-4fe5-be30-52b9bd840960-secret-volume\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.409662 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hndjg\" (UniqueName: \"kubernetes.io/projected/6eeb7840-a41b-4fe5-be30-52b9bd840960-kube-api-access-hndjg\") pod \"collect-profiles-29412735-rh2bv\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.418672 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.441606 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.491384 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.795627 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-ncpgb"] Dec 03 12:15:00 crc kubenswrapper[4591]: W1203 12:15:00.799539 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7cb056f_6d6d_4e52_adcc_10c051df9400.slice/crio-b5b579cf5716a7c57ba4e0c31803bd3b4999ea4f9bc3728fd0f4f4cc9c3aa8a0 WatchSource:0}: Error finding container b5b579cf5716a7c57ba4e0c31803bd3b4999ea4f9bc3728fd0f4f4cc9c3aa8a0: Status 404 returned error can't find the container with id b5b579cf5716a7c57ba4e0c31803bd3b4999ea4f9bc3728fd0f4f4cc9c3aa8a0 Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.834812 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-wt6ln"] Dec 03 12:15:00 crc kubenswrapper[4591]: W1203 12:15:00.837743 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod551b74b5_4ba8_4b29_b23b_fd955fbe0819.slice/crio-2553697f2fca250e29d06412d4876392047255ea375edbaa443f985ad0ea80e3 WatchSource:0}: Error finding container 2553697f2fca250e29d06412d4876392047255ea375edbaa443f985ad0ea80e3: Status 404 returned error can't find the container with id 2553697f2fca250e29d06412d4876392047255ea375edbaa443f985ad0ea80e3 Dec 03 12:15:00 crc kubenswrapper[4591]: W1203 12:15:00.875695 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72176880_d1d6_48ed_9099_15650cd27b96.slice/crio-40a19e79f7814ea6ac3fa4a93f275bb45463639603b8ce4b995d4c1d5260be78 WatchSource:0}: Error finding container 40a19e79f7814ea6ac3fa4a93f275bb45463639603b8ce4b995d4c1d5260be78: Status 404 returned error can't find the container with id 40a19e79f7814ea6ac3fa4a93f275bb45463639603b8ce4b995d4c1d5260be78 Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.875983 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-h279r"] Dec 03 12:15:00 crc kubenswrapper[4591]: I1203 12:15:00.931700 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv"] Dec 03 12:15:00 crc kubenswrapper[4591]: W1203 12:15:00.936874 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6eeb7840_a41b_4fe5_be30_52b9bd840960.slice/crio-d6eec4263449e5fa89680ff63313780541511184805ff262f29be192cccf9f41 WatchSource:0}: Error finding container d6eec4263449e5fa89680ff63313780541511184805ff262f29be192cccf9f41: Status 404 returned error can't find the container with id d6eec4263449e5fa89680ff63313780541511184805ff262f29be192cccf9f41 Dec 03 12:15:01 crc kubenswrapper[4591]: I1203 12:15:01.630798 4591 generic.go:334] "Generic (PLEG): container finished" podID="6eeb7840-a41b-4fe5-be30-52b9bd840960" containerID="e9ef9031ca686ac18b2d1b66bc64933fee2705d1b66793f4877817677dabc945" exitCode=0 Dec 03 12:15:01 crc kubenswrapper[4591]: I1203 12:15:01.630852 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" 
event={"ID":"6eeb7840-a41b-4fe5-be30-52b9bd840960","Type":"ContainerDied","Data":"e9ef9031ca686ac18b2d1b66bc64933fee2705d1b66793f4877817677dabc945"} Dec 03 12:15:01 crc kubenswrapper[4591]: I1203 12:15:01.630914 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" event={"ID":"6eeb7840-a41b-4fe5-be30-52b9bd840960","Type":"ContainerStarted","Data":"d6eec4263449e5fa89680ff63313780541511184805ff262f29be192cccf9f41"} Dec 03 12:15:01 crc kubenswrapper[4591]: I1203 12:15:01.632243 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" event={"ID":"551b74b5-4ba8-4b29-b23b-fd955fbe0819","Type":"ContainerStarted","Data":"2553697f2fca250e29d06412d4876392047255ea375edbaa443f985ad0ea80e3"} Dec 03 12:15:01 crc kubenswrapper[4591]: I1203 12:15:01.633439 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" event={"ID":"72176880-d1d6-48ed-9099-15650cd27b96","Type":"ContainerStarted","Data":"40a19e79f7814ea6ac3fa4a93f275bb45463639603b8ce4b995d4c1d5260be78"} Dec 03 12:15:01 crc kubenswrapper[4591]: I1203 12:15:01.634347 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-ncpgb" event={"ID":"b7cb056f-6d6d-4e52-adcc-10c051df9400","Type":"ContainerStarted","Data":"b5b579cf5716a7c57ba4e0c31803bd3b4999ea4f9bc3728fd0f4f4cc9c3aa8a0"} Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.620303 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.649427 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" event={"ID":"6eeb7840-a41b-4fe5-be30-52b9bd840960","Type":"ContainerDied","Data":"d6eec4263449e5fa89680ff63313780541511184805ff262f29be192cccf9f41"} Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.649465 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6eec4263449e5fa89680ff63313780541511184805ff262f29be192cccf9f41" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.649499 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-rh2bv" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.750962 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6eeb7840-a41b-4fe5-be30-52b9bd840960-config-volume\") pod \"6eeb7840-a41b-4fe5-be30-52b9bd840960\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.751048 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6eeb7840-a41b-4fe5-be30-52b9bd840960-secret-volume\") pod \"6eeb7840-a41b-4fe5-be30-52b9bd840960\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.751109 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hndjg\" (UniqueName: \"kubernetes.io/projected/6eeb7840-a41b-4fe5-be30-52b9bd840960-kube-api-access-hndjg\") pod \"6eeb7840-a41b-4fe5-be30-52b9bd840960\" (UID: \"6eeb7840-a41b-4fe5-be30-52b9bd840960\") " Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.751934 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eeb7840-a41b-4fe5-be30-52b9bd840960-config-volume" (OuterVolumeSpecName: "config-volume") pod "6eeb7840-a41b-4fe5-be30-52b9bd840960" (UID: "6eeb7840-a41b-4fe5-be30-52b9bd840960"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.752250 4591 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6eeb7840-a41b-4fe5-be30-52b9bd840960-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.758039 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6eeb7840-a41b-4fe5-be30-52b9bd840960-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6eeb7840-a41b-4fe5-be30-52b9bd840960" (UID: "6eeb7840-a41b-4fe5-be30-52b9bd840960"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.758095 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eeb7840-a41b-4fe5-be30-52b9bd840960-kube-api-access-hndjg" (OuterVolumeSpecName: "kube-api-access-hndjg") pod "6eeb7840-a41b-4fe5-be30-52b9bd840960" (UID: "6eeb7840-a41b-4fe5-be30-52b9bd840960"). InnerVolumeSpecName "kube-api-access-hndjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.853917 4591 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6eeb7840-a41b-4fe5-be30-52b9bd840960-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:03 crc kubenswrapper[4591]: I1203 12:15:03.853950 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hndjg\" (UniqueName: \"kubernetes.io/projected/6eeb7840-a41b-4fe5-be30-52b9bd840960-kube-api-access-hndjg\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.557210 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-b7b9r" Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.656401 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-ncpgb" event={"ID":"b7cb056f-6d6d-4e52-adcc-10c051df9400","Type":"ContainerStarted","Data":"977c133472883ff84422862e17f4d823a970449c59da4c5e760adf4123775ddf"} Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.658173 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" event={"ID":"551b74b5-4ba8-4b29-b23b-fd955fbe0819","Type":"ContainerStarted","Data":"156707714a7425fd20da8e101d67e39669eb9cb5d797b2beeab9797af494ccf9"} Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.659689 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" event={"ID":"72176880-d1d6-48ed-9099-15650cd27b96","Type":"ContainerStarted","Data":"b8d47fa84e6e5948932882892a2ec15221c54d32bf19704505cde495f2c45604"} Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.659814 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.668273 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-ncpgb" podStartSLOduration=1.5687729030000002 podStartE2EDuration="4.668256588s" podCreationTimestamp="2025-12-03 12:15:00 +0000 UTC" firstStartedPulling="2025-12-03 12:15:00.802039677 +0000 UTC m=+598.229079437" lastFinishedPulling="2025-12-03 12:15:03.901523352 +0000 UTC m=+601.328563122" observedRunningTime="2025-12-03 12:15:04.66779038 +0000 UTC m=+602.094830150" watchObservedRunningTime="2025-12-03 12:15:04.668256588 +0000 UTC m=+602.095296358" Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.681046 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" podStartSLOduration=1.662193513 podStartE2EDuration="4.681019402s" podCreationTimestamp="2025-12-03 12:15:00 +0000 UTC" firstStartedPulling="2025-12-03 12:15:00.87803102 +0000 UTC m=+598.305070789" lastFinishedPulling="2025-12-03 12:15:03.896856898 +0000 UTC m=+601.323896678" observedRunningTime="2025-12-03 12:15:04.680020571 +0000 UTC m=+602.107060342" watchObservedRunningTime="2025-12-03 12:15:04.681019402 +0000 UTC m=+602.108059172" Dec 03 12:15:04 crc kubenswrapper[4591]: I1203 12:15:04.699750 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-wt6ln" podStartSLOduration=1.642909613 podStartE2EDuration="4.699732907s" podCreationTimestamp="2025-12-03 12:15:00 +0000 UTC" 
firstStartedPulling="2025-12-03 12:15:00.840026631 +0000 UTC m=+598.267066401" lastFinishedPulling="2025-12-03 12:15:03.896849925 +0000 UTC m=+601.323889695" observedRunningTime="2025-12-03 12:15:04.696342265 +0000 UTC m=+602.123382035" watchObservedRunningTime="2025-12-03 12:15:04.699732907 +0000 UTC m=+602.126772677" Dec 03 12:15:10 crc kubenswrapper[4591]: I1203 12:15:10.444864 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-h279r" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.179157 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw"] Dec 03 12:15:28 crc kubenswrapper[4591]: E1203 12:15:28.180746 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eeb7840-a41b-4fe5-be30-52b9bd840960" containerName="collect-profiles" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.180792 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eeb7840-a41b-4fe5-be30-52b9bd840960" containerName="collect-profiles" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.180966 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eeb7840-a41b-4fe5-be30-52b9bd840960" containerName="collect-profiles" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.181987 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.183961 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.188224 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw"] Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.235484 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfxm5\" (UniqueName: \"kubernetes.io/projected/6c6032e3-7ac5-4f93-97df-c35b975f6d17-kube-api-access-sfxm5\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.235532 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.235568 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.336808 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-sfxm5\" (UniqueName: \"kubernetes.io/projected/6c6032e3-7ac5-4f93-97df-c35b975f6d17-kube-api-access-sfxm5\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.336873 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.336914 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.337539 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.337539 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.359326 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfxm5\" (UniqueName: \"kubernetes.io/projected/6c6032e3-7ac5-4f93-97df-c35b975f6d17-kube-api-access-sfxm5\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.392444 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l"] Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.393651 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.401588 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l"] Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.438154 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.438203 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxj4b\" (UniqueName: \"kubernetes.io/projected/9aab9ae9-de15-483b-96d1-1838b473c557-kube-api-access-pxj4b\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.438240 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.503984 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.539306 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.539423 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.539453 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxj4b\" (UniqueName: \"kubernetes.io/projected/9aab9ae9-de15-483b-96d1-1838b473c557-kube-api-access-pxj4b\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.539979 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.539990 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.556200 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxj4b\" (UniqueName: \"kubernetes.io/projected/9aab9ae9-de15-483b-96d1-1838b473c557-kube-api-access-pxj4b\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.707693 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:28 crc kubenswrapper[4591]: I1203 12:15:28.865978 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw"] Dec 03 12:15:29 crc kubenswrapper[4591]: I1203 12:15:29.075052 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l"] Dec 03 12:15:29 crc kubenswrapper[4591]: W1203 12:15:29.108560 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9aab9ae9_de15_483b_96d1_1838b473c557.slice/crio-5849c0b259459c095edfdadffc3f3a4841dfe8f6f1cc4084cd89575f0828613f WatchSource:0}: Error finding container 5849c0b259459c095edfdadffc3f3a4841dfe8f6f1cc4084cd89575f0828613f: Status 404 returned error can't find the container with id 5849c0b259459c095edfdadffc3f3a4841dfe8f6f1cc4084cd89575f0828613f Dec 03 12:15:29 crc kubenswrapper[4591]: I1203 12:15:29.812625 4591 generic.go:334] "Generic (PLEG): container finished" podID="9aab9ae9-de15-483b-96d1-1838b473c557" containerID="7919bca36176ef67d7e5c0c6f64761e50907163286e7febc572084be4cec29ff" exitCode=0 Dec 03 12:15:29 crc kubenswrapper[4591]: I1203 12:15:29.812854 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" event={"ID":"9aab9ae9-de15-483b-96d1-1838b473c557","Type":"ContainerDied","Data":"7919bca36176ef67d7e5c0c6f64761e50907163286e7febc572084be4cec29ff"} Dec 03 12:15:29 crc kubenswrapper[4591]: I1203 12:15:29.813169 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" event={"ID":"9aab9ae9-de15-483b-96d1-1838b473c557","Type":"ContainerStarted","Data":"5849c0b259459c095edfdadffc3f3a4841dfe8f6f1cc4084cd89575f0828613f"} Dec 03 12:15:29 crc kubenswrapper[4591]: I1203 12:15:29.815660 4591 generic.go:334] "Generic (PLEG): container finished" podID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerID="11e063072ecd3d20dadcf1e563ed0d72162f9959917f4fe945985ecc6c4182c0" exitCode=0 Dec 03 12:15:29 crc kubenswrapper[4591]: I1203 12:15:29.815715 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" event={"ID":"6c6032e3-7ac5-4f93-97df-c35b975f6d17","Type":"ContainerDied","Data":"11e063072ecd3d20dadcf1e563ed0d72162f9959917f4fe945985ecc6c4182c0"} Dec 03 12:15:29 crc kubenswrapper[4591]: I1203 12:15:29.815747 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" event={"ID":"6c6032e3-7ac5-4f93-97df-c35b975f6d17","Type":"ContainerStarted","Data":"356f33e550fc82e132e6dd3afeabca9f976465c4570c3b029b5275015f0c1ec4"} Dec 03 12:15:31 crc kubenswrapper[4591]: I1203 12:15:31.837602 4591 generic.go:334] "Generic (PLEG): container finished" podID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerID="b818cbe7ba01fcb117a245e92b950e5233fb9afae6dbe6c45f2b7d7844aa3bc8" exitCode=0 Dec 03 12:15:31 crc kubenswrapper[4591]: I1203 12:15:31.837762 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" 
event={"ID":"6c6032e3-7ac5-4f93-97df-c35b975f6d17","Type":"ContainerDied","Data":"b818cbe7ba01fcb117a245e92b950e5233fb9afae6dbe6c45f2b7d7844aa3bc8"} Dec 03 12:15:31 crc kubenswrapper[4591]: I1203 12:15:31.840152 4591 generic.go:334] "Generic (PLEG): container finished" podID="9aab9ae9-de15-483b-96d1-1838b473c557" containerID="681ea336177c1f9f7d69b1e9b13a5cb1adf882ff9f4b94494f258b7f9794a7b4" exitCode=0 Dec 03 12:15:31 crc kubenswrapper[4591]: I1203 12:15:31.840194 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" event={"ID":"9aab9ae9-de15-483b-96d1-1838b473c557","Type":"ContainerDied","Data":"681ea336177c1f9f7d69b1e9b13a5cb1adf882ff9f4b94494f258b7f9794a7b4"} Dec 03 12:15:32 crc kubenswrapper[4591]: I1203 12:15:32.848900 4591 generic.go:334] "Generic (PLEG): container finished" podID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerID="43326d13da9ec51b78a6fc9f13c548894f1accd27a6ee6df4eba389a8bede23e" exitCode=0 Dec 03 12:15:32 crc kubenswrapper[4591]: I1203 12:15:32.848945 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" event={"ID":"6c6032e3-7ac5-4f93-97df-c35b975f6d17","Type":"ContainerDied","Data":"43326d13da9ec51b78a6fc9f13c548894f1accd27a6ee6df4eba389a8bede23e"} Dec 03 12:15:32 crc kubenswrapper[4591]: I1203 12:15:32.851386 4591 generic.go:334] "Generic (PLEG): container finished" podID="9aab9ae9-de15-483b-96d1-1838b473c557" containerID="045e8321464042215c8bb7cac76a9f0754500a840cdbd962f9f65d86608e522d" exitCode=0 Dec 03 12:15:32 crc kubenswrapper[4591]: I1203 12:15:32.851471 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" event={"ID":"9aab9ae9-de15-483b-96d1-1838b473c557","Type":"ContainerDied","Data":"045e8321464042215c8bb7cac76a9f0754500a840cdbd962f9f65d86608e522d"} Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.130045 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.134350 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.233866 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-util\") pod \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.233933 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-util\") pod \"9aab9ae9-de15-483b-96d1-1838b473c557\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.233975 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfxm5\" (UniqueName: \"kubernetes.io/projected/6c6032e3-7ac5-4f93-97df-c35b975f6d17-kube-api-access-sfxm5\") pod \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.234010 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-bundle\") pod \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\" (UID: \"6c6032e3-7ac5-4f93-97df-c35b975f6d17\") " Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.234024 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxj4b\" (UniqueName: \"kubernetes.io/projected/9aab9ae9-de15-483b-96d1-1838b473c557-kube-api-access-pxj4b\") pod \"9aab9ae9-de15-483b-96d1-1838b473c557\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.234059 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-bundle\") pod \"9aab9ae9-de15-483b-96d1-1838b473c557\" (UID: \"9aab9ae9-de15-483b-96d1-1838b473c557\") " Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.234961 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-bundle" (OuterVolumeSpecName: "bundle") pod "9aab9ae9-de15-483b-96d1-1838b473c557" (UID: "9aab9ae9-de15-483b-96d1-1838b473c557"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.234972 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-bundle" (OuterVolumeSpecName: "bundle") pod "6c6032e3-7ac5-4f93-97df-c35b975f6d17" (UID: "6c6032e3-7ac5-4f93-97df-c35b975f6d17"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.239536 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c6032e3-7ac5-4f93-97df-c35b975f6d17-kube-api-access-sfxm5" (OuterVolumeSpecName: "kube-api-access-sfxm5") pod "6c6032e3-7ac5-4f93-97df-c35b975f6d17" (UID: "6c6032e3-7ac5-4f93-97df-c35b975f6d17"). InnerVolumeSpecName "kube-api-access-sfxm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.239635 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aab9ae9-de15-483b-96d1-1838b473c557-kube-api-access-pxj4b" (OuterVolumeSpecName: "kube-api-access-pxj4b") pod "9aab9ae9-de15-483b-96d1-1838b473c557" (UID: "9aab9ae9-de15-483b-96d1-1838b473c557"). InnerVolumeSpecName "kube-api-access-pxj4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.244257 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-util" (OuterVolumeSpecName: "util") pod "9aab9ae9-de15-483b-96d1-1838b473c557" (UID: "9aab9ae9-de15-483b-96d1-1838b473c557"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.244367 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-util" (OuterVolumeSpecName: "util") pod "6c6032e3-7ac5-4f93-97df-c35b975f6d17" (UID: "6c6032e3-7ac5-4f93-97df-c35b975f6d17"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.334627 4591 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.334654 4591 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.334664 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfxm5\" (UniqueName: \"kubernetes.io/projected/6c6032e3-7ac5-4f93-97df-c35b975f6d17-kube-api-access-sfxm5\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.334676 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxj4b\" (UniqueName: \"kubernetes.io/projected/9aab9ae9-de15-483b-96d1-1838b473c557-kube-api-access-pxj4b\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.334685 4591 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c6032e3-7ac5-4f93-97df-c35b975f6d17-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.334694 4591 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9aab9ae9-de15-483b-96d1-1838b473c557-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.865849 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" event={"ID":"6c6032e3-7ac5-4f93-97df-c35b975f6d17","Type":"ContainerDied","Data":"356f33e550fc82e132e6dd3afeabca9f976465c4570c3b029b5275015f0c1ec4"} Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.865895 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="356f33e550fc82e132e6dd3afeabca9f976465c4570c3b029b5275015f0c1ec4" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.865894 4591 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.868142 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" event={"ID":"9aab9ae9-de15-483b-96d1-1838b473c557","Type":"ContainerDied","Data":"5849c0b259459c095edfdadffc3f3a4841dfe8f6f1cc4084cd89575f0828613f"} Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.868194 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5849c0b259459c095edfdadffc3f3a4841dfe8f6f1cc4084cd89575f0828613f" Dec 03 12:15:34 crc kubenswrapper[4591]: I1203 12:15:34.868215 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.580724 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8"] Dec 03 12:15:45 crc kubenswrapper[4591]: E1203 12:15:45.581213 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerName="pull" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581229 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerName="pull" Dec 03 12:15:45 crc kubenswrapper[4591]: E1203 12:15:45.581242 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerName="extract" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581248 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerName="extract" Dec 03 12:15:45 crc kubenswrapper[4591]: E1203 12:15:45.581257 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aab9ae9-de15-483b-96d1-1838b473c557" containerName="extract" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581263 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aab9ae9-de15-483b-96d1-1838b473c557" containerName="extract" Dec 03 12:15:45 crc kubenswrapper[4591]: E1203 12:15:45.581275 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerName="util" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581280 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerName="util" Dec 03 12:15:45 crc kubenswrapper[4591]: E1203 12:15:45.581286 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aab9ae9-de15-483b-96d1-1838b473c557" containerName="util" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581291 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aab9ae9-de15-483b-96d1-1838b473c557" containerName="util" Dec 03 12:15:45 crc kubenswrapper[4591]: E1203 12:15:45.581297 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aab9ae9-de15-483b-96d1-1838b473c557" containerName="pull" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581303 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aab9ae9-de15-483b-96d1-1838b473c557" containerName="pull" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581411 4591 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="9aab9ae9-de15-483b-96d1-1838b473c557" containerName="extract" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.581425 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c6032e3-7ac5-4f93-97df-c35b975f6d17" containerName="extract" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.582023 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.583736 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.588105 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.588484 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.588958 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.589010 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.589700 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-w4v9h" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.598874 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8"] Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.602739 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/5d7a891c-0e61-49b3-856e-0a35b5d53b03-manager-config\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.602792 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-apiservice-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.602903 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.602960 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th8pj\" (UniqueName: 
\"kubernetes.io/projected/5d7a891c-0e61-49b3-856e-0a35b5d53b03-kube-api-access-th8pj\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.603228 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-webhook-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.704387 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-webhook-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.704452 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/5d7a891c-0e61-49b3-856e-0a35b5d53b03-manager-config\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.704485 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-apiservice-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.704518 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.704544 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th8pj\" (UniqueName: \"kubernetes.io/projected/5d7a891c-0e61-49b3-856e-0a35b5d53b03-kube-api-access-th8pj\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.705317 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/5d7a891c-0e61-49b3-856e-0a35b5d53b03-manager-config\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.710869 4591 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.711853 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-apiservice-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.719208 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5d7a891c-0e61-49b3-856e-0a35b5d53b03-webhook-cert\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.725381 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th8pj\" (UniqueName: \"kubernetes.io/projected/5d7a891c-0e61-49b3-856e-0a35b5d53b03-kube-api-access-th8pj\") pod \"loki-operator-controller-manager-6d78b5768d-sb2l8\" (UID: \"5d7a891c-0e61-49b3-856e-0a35b5d53b03\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:45 crc kubenswrapper[4591]: I1203 12:15:45.896668 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:46 crc kubenswrapper[4591]: I1203 12:15:46.346355 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8"] Dec 03 12:15:46 crc kubenswrapper[4591]: I1203 12:15:46.966943 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" event={"ID":"5d7a891c-0e61-49b3-856e-0a35b5d53b03","Type":"ContainerStarted","Data":"126138ec80a15a60b18207905fd63356f28632a919d8f9edd45c85f09b49dfb3"} Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.336079 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-95tbr"] Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.337406 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.339630 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.339851 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-s4wvk" Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.339999 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.352627 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-95tbr"] Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.364745 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxhlk\" (UniqueName: \"kubernetes.io/projected/a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2-kube-api-access-bxhlk\") pod \"cluster-logging-operator-ff9846bd-95tbr\" (UID: \"a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.465807 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxhlk\" (UniqueName: \"kubernetes.io/projected/a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2-kube-api-access-bxhlk\") pod \"cluster-logging-operator-ff9846bd-95tbr\" (UID: \"a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.484864 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxhlk\" (UniqueName: \"kubernetes.io/projected/a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2-kube-api-access-bxhlk\") pod \"cluster-logging-operator-ff9846bd-95tbr\" (UID: \"a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" Dec 03 12:15:49 crc kubenswrapper[4591]: I1203 12:15:49.660467 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" Dec 03 12:15:50 crc kubenswrapper[4591]: I1203 12:15:50.628152 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-95tbr"] Dec 03 12:15:50 crc kubenswrapper[4591]: W1203 12:15:50.634295 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1a26c4d_0460_4cb8_8f10_6d46fd68bbf2.slice/crio-5284f5c75d85b957fecf0d057f6d2ae2526f194f343e3109fd1e9b82036f13e0 WatchSource:0}: Error finding container 5284f5c75d85b957fecf0d057f6d2ae2526f194f343e3109fd1e9b82036f13e0: Status 404 returned error can't find the container with id 5284f5c75d85b957fecf0d057f6d2ae2526f194f343e3109fd1e9b82036f13e0 Dec 03 12:15:51 crc kubenswrapper[4591]: I1203 12:15:51.004994 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" event={"ID":"5d7a891c-0e61-49b3-856e-0a35b5d53b03","Type":"ContainerStarted","Data":"bcef9813afd2c7b3284f0eb0e96e03a8b5d8c8c9ff2ad89948c3d7ad164c6628"} Dec 03 12:15:51 crc kubenswrapper[4591]: I1203 12:15:51.006365 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" event={"ID":"a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2","Type":"ContainerStarted","Data":"5284f5c75d85b957fecf0d057f6d2ae2526f194f343e3109fd1e9b82036f13e0"} Dec 03 12:15:58 crc kubenswrapper[4591]: I1203 12:15:58.060096 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" event={"ID":"a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2","Type":"ContainerStarted","Data":"99a12ffe1a139bd0389683483e7d5b7af876d51569c97c17f2848dfe366847e6"} Dec 03 12:15:58 crc kubenswrapper[4591]: I1203 12:15:58.063264 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" event={"ID":"5d7a891c-0e61-49b3-856e-0a35b5d53b03","Type":"ContainerStarted","Data":"ccbc183a24c161781cba50329f84bfda52e669e9317c99770333fc1e0e045ae2"} Dec 03 12:15:58 crc kubenswrapper[4591]: I1203 12:15:58.063521 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:58 crc kubenswrapper[4591]: I1203 12:15:58.066818 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" Dec 03 12:15:58 crc kubenswrapper[4591]: I1203 12:15:58.108640 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-ff9846bd-95tbr" podStartSLOduration=2.747372935 podStartE2EDuration="9.10862342s" podCreationTimestamp="2025-12-03 12:15:49 +0000 UTC" firstStartedPulling="2025-12-03 12:15:50.640398201 +0000 UTC m=+648.067437961" lastFinishedPulling="2025-12-03 12:15:57.001648676 +0000 UTC m=+654.428688446" observedRunningTime="2025-12-03 12:15:58.085290904 +0000 UTC m=+655.512330675" watchObservedRunningTime="2025-12-03 12:15:58.10862342 +0000 UTC m=+655.535663190" Dec 03 12:15:58 crc kubenswrapper[4591]: I1203 12:15:58.110641 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-6d78b5768d-sb2l8" podStartSLOduration=2.462883247 podStartE2EDuration="13.110637507s" 
podCreationTimestamp="2025-12-03 12:15:45 +0000 UTC" firstStartedPulling="2025-12-03 12:15:46.354837891 +0000 UTC m=+643.781877661" lastFinishedPulling="2025-12-03 12:15:57.002592151 +0000 UTC m=+654.429631921" observedRunningTime="2025-12-03 12:15:58.106311791 +0000 UTC m=+655.533351562" watchObservedRunningTime="2025-12-03 12:15:58.110637507 +0000 UTC m=+655.537677277" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.272946 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.274421 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.276689 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.277005 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.279266 4591 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-dpvrb" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.282573 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.410659 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dwjp\" (UniqueName: \"kubernetes.io/projected/64da05dc-d52a-46fa-85d9-88005cc27524-kube-api-access-8dwjp\") pod \"minio\" (UID: \"64da05dc-d52a-46fa-85d9-88005cc27524\") " pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.411058 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b901ab48-7e41-484b-b73c-efff831c3f63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b901ab48-7e41-484b-b73c-efff831c3f63\") pod \"minio\" (UID: \"64da05dc-d52a-46fa-85d9-88005cc27524\") " pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.512911 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b901ab48-7e41-484b-b73c-efff831c3f63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b901ab48-7e41-484b-b73c-efff831c3f63\") pod \"minio\" (UID: \"64da05dc-d52a-46fa-85d9-88005cc27524\") " pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.513231 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dwjp\" (UniqueName: \"kubernetes.io/projected/64da05dc-d52a-46fa-85d9-88005cc27524-kube-api-access-8dwjp\") pod \"minio\" (UID: \"64da05dc-d52a-46fa-85d9-88005cc27524\") " pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.518097 4591 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.518138 4591 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b901ab48-7e41-484b-b73c-efff831c3f63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b901ab48-7e41-484b-b73c-efff831c3f63\") pod \"minio\" (UID: \"64da05dc-d52a-46fa-85d9-88005cc27524\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/21d1d0c1c0d9938b302cc2fd77a9054b3aba8b278308efe5cadb6f112da6924f/globalmount\"" pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.532456 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dwjp\" (UniqueName: \"kubernetes.io/projected/64da05dc-d52a-46fa-85d9-88005cc27524-kube-api-access-8dwjp\") pod \"minio\" (UID: \"64da05dc-d52a-46fa-85d9-88005cc27524\") " pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.548243 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b901ab48-7e41-484b-b73c-efff831c3f63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b901ab48-7e41-484b-b73c-efff831c3f63\") pod \"minio\" (UID: \"64da05dc-d52a-46fa-85d9-88005cc27524\") " pod="minio-dev/minio" Dec 03 12:16:03 crc kubenswrapper[4591]: I1203 12:16:03.592436 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 03 12:16:04 crc kubenswrapper[4591]: I1203 12:16:04.075110 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 03 12:16:04 crc kubenswrapper[4591]: W1203 12:16:04.078984 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64da05dc_d52a_46fa_85d9_88005cc27524.slice/crio-3291abc0f5a9d2d48cf860cd82a88afec112337b02ce044912614ceb299a0405 WatchSource:0}: Error finding container 3291abc0f5a9d2d48cf860cd82a88afec112337b02ce044912614ceb299a0405: Status 404 returned error can't find the container with id 3291abc0f5a9d2d48cf860cd82a88afec112337b02ce044912614ceb299a0405 Dec 03 12:16:04 crc kubenswrapper[4591]: I1203 12:16:04.117873 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"64da05dc-d52a-46fa-85d9-88005cc27524","Type":"ContainerStarted","Data":"3291abc0f5a9d2d48cf860cd82a88afec112337b02ce044912614ceb299a0405"} Dec 03 12:16:08 crc kubenswrapper[4591]: I1203 12:16:08.143785 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"64da05dc-d52a-46fa-85d9-88005cc27524","Type":"ContainerStarted","Data":"a4370090615e1ad98ed68e76c562c426a4c54829b9e0ea22ba4552e0afe95dce"} Dec 03 12:16:08 crc kubenswrapper[4591]: I1203 12:16:08.161078 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=5.147040099 podStartE2EDuration="8.161047536s" podCreationTimestamp="2025-12-03 12:16:00 +0000 UTC" firstStartedPulling="2025-12-03 12:16:04.081170668 +0000 UTC m=+661.508210438" lastFinishedPulling="2025-12-03 12:16:07.095178105 +0000 UTC m=+664.522217875" observedRunningTime="2025-12-03 12:16:08.158775071 +0000 UTC m=+665.585814842" watchObservedRunningTime="2025-12-03 12:16:08.161047536 +0000 UTC m=+665.588087305" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.059806 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.061360 
4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.064727 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-ca-bundle" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.064927 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-http" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.065432 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-dockercfg-6lc64" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.065553 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-grpc" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.065671 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-config" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.066990 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.155928 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-config\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.155981 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.156046 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.156234 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.156295 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdncm\" (UniqueName: \"kubernetes.io/projected/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-kube-api-access-wdncm\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 
12:16:12.205290 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-zk2rp"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.206102 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.217825 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-s3" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.217970 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-grpc" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.218122 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-http" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.222898 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-zk2rp"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.258009 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.258080 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.258120 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-config\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.258168 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.258251 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.259185 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-distributor-http\") pod 
\"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.259220 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8lgk\" (UniqueName: \"kubernetes.io/projected/58cfe77c-1a62-4d74-b40b-222ca7031658-kube-api-access-b8lgk\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.259292 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cfe77c-1a62-4d74-b40b-222ca7031658-config\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.259378 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.259405 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.259435 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdncm\" (UniqueName: \"kubernetes.io/projected/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-kube-api-access-wdncm\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.260006 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-config\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.260633 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.264320 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: 
\"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.280040 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.282005 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.282911 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.285974 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-http" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.286045 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-grpc" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.297870 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.318892 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdncm\" (UniqueName: \"kubernetes.io/projected/ca5e0135-4162-41eb-8bb0-4bbe375f13dc-kube-api-access-wdncm\") pod \"logging-loki-distributor-76cc67bf56-9b7f9\" (UID: \"ca5e0135-4162-41eb-8bb0-4bbe375f13dc\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360465 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360537 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360560 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836410c1-1063-427d-a270-dcd93f89dfd5-config\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360646 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-s3\") pod 
\"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360675 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360707 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360740 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360795 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8lgk\" (UniqueName: \"kubernetes.io/projected/58cfe77c-1a62-4d74-b40b-222ca7031658-kube-api-access-b8lgk\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360826 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360857 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cfe77c-1a62-4d74-b40b-222ca7031658-config\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.360880 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf7r8\" (UniqueName: \"kubernetes.io/projected/836410c1-1063-427d-a270-dcd93f89dfd5-kube-api-access-bf7r8\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.361693 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-ca-bundle\") pod 
\"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.363887 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cfe77c-1a62-4d74-b40b-222ca7031658-config\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.364654 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.364719 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.367443 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/58cfe77c-1a62-4d74-b40b-222ca7031658-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.376186 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.383731 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8lgk\" (UniqueName: \"kubernetes.io/projected/58cfe77c-1a62-4d74-b40b-222ca7031658-kube-api-access-b8lgk\") pod \"logging-loki-querier-5895d59bb8-zk2rp\" (UID: \"58cfe77c-1a62-4d74-b40b-222ca7031658\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.395644 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.396949 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.399868 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway-ca-bundle" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.400183 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.400389 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.400634 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-http" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.400989 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-client-http" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.416533 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.417766 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.419658 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-dockercfg-jbgf9" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.419888 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.422996 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl"] Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462641 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462725 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf7r8\" (UniqueName: \"kubernetes.io/projected/836410c1-1063-427d-a270-dcd93f89dfd5-kube-api-access-bf7r8\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462780 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7b7d\" (UniqueName: \"kubernetes.io/projected/8395cf21-0ee1-4760-8529-0b7be4c16b92-kube-api-access-l7b7d\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462828 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-lokistack-gateway\") 
pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462864 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462903 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462932 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836410c1-1063-427d-a270-dcd93f89dfd5-config\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.462995 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463049 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tenants\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463087 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463113 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tenants\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463180 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-rbac\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" 
(UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463217 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463234 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463264 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-lokistack-gateway\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463297 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4k4w\" (UniqueName: \"kubernetes.io/projected/480391ab-2b77-43a4-96a8-c821e57eb922-kube-api-access-p4k4w\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463321 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463372 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463417 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463449 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-ca-bundle\") pod 
\"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.463470 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-rbac\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.466278 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836410c1-1063-427d-a270-dcd93f89dfd5-config\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.466710 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.470639 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.470691 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/836410c1-1063-427d-a270-dcd93f89dfd5-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.485808 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf7r8\" (UniqueName: \"kubernetes.io/projected/836410c1-1063-427d-a270-dcd93f89dfd5-kube-api-access-bf7r8\") pod \"logging-loki-query-frontend-84558f7c9f-fv8vs\" (UID: \"836410c1-1063-427d-a270-dcd93f89dfd5\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.521765 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.564966 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565048 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tenants\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565097 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565131 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tenants\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565190 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-rbac\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565207 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565241 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-lokistack-gateway\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565258 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4k4w\" (UniqueName: \"kubernetes.io/projected/480391ab-2b77-43a4-96a8-c821e57eb922-kube-api-access-p4k4w\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565280 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565318 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565376 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565394 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-rbac\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565419 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565464 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7b7d\" (UniqueName: \"kubernetes.io/projected/8395cf21-0ee1-4760-8529-0b7be4c16b92-kube-api-access-l7b7d\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565493 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-lokistack-gateway\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.565530 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.566094 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-ca-bundle\") pod 
\"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: E1203 12:16:12.566287 4591 secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Dec 03 12:16:12 crc kubenswrapper[4591]: E1203 12:16:12.566358 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tls-secret podName:8395cf21-0ee1-4760-8529-0b7be4c16b92 nodeName:}" failed. No retries permitted until 2025-12-03 12:16:13.066339035 +0000 UTC m=+670.493378806 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tls-secret") pod "logging-loki-gateway-5f86f97d54-cwgrl" (UID: "8395cf21-0ee1-4760-8529-0b7be4c16b92") : secret "logging-loki-gateway-http" not found Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.567195 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.567269 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.567284 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-lokistack-gateway\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.567320 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-rbac\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: E1203 12:16:12.567405 4591 secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Dec 03 12:16:12 crc kubenswrapper[4591]: E1203 12:16:12.567473 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tls-secret podName:480391ab-2b77-43a4-96a8-c821e57eb922 nodeName:}" failed. No retries permitted until 2025-12-03 12:16:13.067454644 +0000 UTC m=+670.494494403 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tls-secret") pod "logging-loki-gateway-5f86f97d54-c4dmm" (UID: "480391ab-2b77-43a4-96a8-c821e57eb922") : secret "logging-loki-gateway-http" not found Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.567519 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.567550 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-lokistack-gateway\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.568441 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/480391ab-2b77-43a4-96a8-c821e57eb922-rbac\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.568596 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tenants\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.569265 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tenants\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.571112 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.571580 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.581778 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7b7d\" (UniqueName: \"kubernetes.io/projected/8395cf21-0ee1-4760-8529-0b7be4c16b92-kube-api-access-l7b7d\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " 
pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.583283 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4k4w\" (UniqueName: \"kubernetes.io/projected/480391ab-2b77-43a4-96a8-c821e57eb922-kube-api-access-p4k4w\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.606359 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.802616 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9"] Dec 03 12:16:12 crc kubenswrapper[4591]: W1203 12:16:12.808112 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca5e0135_4162_41eb_8bb0_4bbe375f13dc.slice/crio-62265152e8158ac048babc113e4593fddf38babbf8a53c1fc7afddf8edfd7b1c WatchSource:0}: Error finding container 62265152e8158ac048babc113e4593fddf38babbf8a53c1fc7afddf8edfd7b1c: Status 404 returned error can't find the container with id 62265152e8158ac048babc113e4593fddf38babbf8a53c1fc7afddf8edfd7b1c Dec 03 12:16:12 crc kubenswrapper[4591]: I1203 12:16:12.920540 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-zk2rp"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.028214 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.075865 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.076608 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.079990 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/480391ab-2b77-43a4-96a8-c821e57eb922-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-c4dmm\" (UID: \"480391ab-2b77-43a4-96a8-c821e57eb922\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.080756 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8395cf21-0ee1-4760-8529-0b7be4c16b92-tls-secret\") pod \"logging-loki-gateway-5f86f97d54-cwgrl\" (UID: \"8395cf21-0ee1-4760-8529-0b7be4c16b92\") " pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.196199 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" event={"ID":"836410c1-1063-427d-a270-dcd93f89dfd5","Type":"ContainerStarted","Data":"62d114f27eda5c76cd4030aa855c3def5ce7e1885f0f4231b5ea7da02b4e0de3"} Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.199300 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" event={"ID":"58cfe77c-1a62-4d74-b40b-222ca7031658","Type":"ContainerStarted","Data":"cf262db2228be1424f6dd99a909013cd77bb2b5dbe923a67d7f2906eb855162b"} Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.203385 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" event={"ID":"ca5e0135-4162-41eb-8bb0-4bbe375f13dc","Type":"ContainerStarted","Data":"62265152e8158ac048babc113e4593fddf38babbf8a53c1fc7afddf8edfd7b1c"} Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.206609 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.208071 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.210685 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-grpc" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.212810 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-http" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.220294 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.261005 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.262277 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.264815 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-grpc" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.264977 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-http" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.268641 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.279687 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.279738 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b519e20b-91ed-48f1-b3e0-9840d35ab56e-config\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.279763 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.279853 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.279961 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.280029 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.280106 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrpnv\" (UniqueName: \"kubernetes.io/projected/b519e20b-91ed-48f1-b3e0-9840d35ab56e-kube-api-access-xrpnv\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " 
pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.280174 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.349180 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.351491 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.353364 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.353758 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.360501 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-http" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.360711 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-grpc" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.360849 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381118 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrpnv\" (UniqueName: \"kubernetes.io/projected/b519e20b-91ed-48f1-b3e0-9840d35ab56e-kube-api-access-xrpnv\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381204 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381245 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381284 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b519e20b-91ed-48f1-b3e0-9840d35ab56e-config\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381307 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381331 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381372 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.381411 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.386510 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.386558 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.387745 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b519e20b-91ed-48f1-b3e0-9840d35ab56e-config\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.388357 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.388836 4591 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.388848 4591 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.388866 4591 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6cac88878d08409ed24c0a787fc57e212bfd7290ba7b34571200bfac568f6bc0/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.388871 4591 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fc711eebd6667c793c2a1a30004a4e8283c02234340ffba3494578338071d3cb/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.389113 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b519e20b-91ed-48f1-b3e0-9840d35ab56e-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.402113 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrpnv\" (UniqueName: \"kubernetes.io/projected/b519e20b-91ed-48f1-b3e0-9840d35ab56e-kube-api-access-xrpnv\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.411534 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1b5f686c-b29e-44fe-83b1-2dcffa9a3d29\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.417100 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c909c189-de35-4fe8-8a3f-c99dc7ccc93b\") pod \"logging-loki-ingester-0\" (UID: \"b519e20b-91ed-48f1-b3e0-9840d35ab56e\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483348 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483398 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: 
\"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483422 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s6vq\" (UniqueName: \"kubernetes.io/projected/64b2bf76-1240-4b84-8881-c41e73fbe414-kube-api-access-2s6vq\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483442 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483463 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483602 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483679 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43b7f0c2-48eb-4736-98fe-bac9553de422-config\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483777 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b2bf76-1240-4b84-8881-c41e73fbe414-config\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483806 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gdbm\" (UniqueName: \"kubernetes.io/projected/43b7f0c2-48eb-4736-98fe-bac9553de422-kube-api-access-2gdbm\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483838 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " 
pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483884 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483948 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483966 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-24af2069-26c4-45c3-b8f4-feac83860416\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24af2069-26c4-45c3-b8f4-feac83860416\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.483992 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.528586 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.585871 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gdbm\" (UniqueName: \"kubernetes.io/projected/43b7f0c2-48eb-4736-98fe-bac9553de422-kube-api-access-2gdbm\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.585927 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.585968 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586015 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586035 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-24af2069-26c4-45c3-b8f4-feac83860416\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24af2069-26c4-45c3-b8f4-feac83860416\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586053 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586128 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586176 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s6vq\" (UniqueName: \"kubernetes.io/projected/64b2bf76-1240-4b84-8881-c41e73fbe414-kube-api-access-2s6vq\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586192 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586216 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586234 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586264 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586292 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43b7f0c2-48eb-4736-98fe-bac9553de422-config\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.586334 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b2bf76-1240-4b84-8881-c41e73fbe414-config\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.587345 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64b2bf76-1240-4b84-8881-c41e73fbe414-config\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.587876 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.587992 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43b7f0c2-48eb-4736-98fe-bac9553de422-config\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.588590 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.591085 4591 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.591116 4591 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.591124 4591 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/74f249d839c651e77d1ed992355b7da1a8cbc1f7bca6884a5fed992f034059c8/globalmount\"" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.591167 4591 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-24af2069-26c4-45c3-b8f4-feac83860416\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24af2069-26c4-45c3-b8f4-feac83860416\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/74181dfbc5ad5888f55f27aa020541d1270532093d5943e2d837b18fb9cc7383/globalmount\"" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.591538 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.592054 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.592674 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.593562 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.596271 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/64b2bf76-1240-4b84-8881-c41e73fbe414-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.601776 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s6vq\" (UniqueName: \"kubernetes.io/projected/64b2bf76-1240-4b84-8881-c41e73fbe414-kube-api-access-2s6vq\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.604733 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/43b7f0c2-48eb-4736-98fe-bac9553de422-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.605700 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gdbm\" (UniqueName: \"kubernetes.io/projected/43b7f0c2-48eb-4736-98fe-bac9553de422-kube-api-access-2gdbm\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.616245 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-24af2069-26c4-45c3-b8f4-feac83860416\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24af2069-26c4-45c3-b8f4-feac83860416\") pod \"logging-loki-compactor-0\" (UID: \"43b7f0c2-48eb-4736-98fe-bac9553de422\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.622145 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32bf8a7d-9dbf-4b89-8e36-bb8b60a9c721\") pod \"logging-loki-index-gateway-0\" (UID: \"64b2bf76-1240-4b84-8881-c41e73fbe414\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.736662 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl"] Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.740778 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:13 crc kubenswrapper[4591]: W1203 12:16:13.741277 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8395cf21_0ee1_4760_8529_0b7be4c16b92.slice/crio-64626d37f59a3162ec2725c343fc01f7b58ab0b8424e54b35ae5eab52f3aae4c WatchSource:0}: Error finding container 64626d37f59a3162ec2725c343fc01f7b58ab0b8424e54b35ae5eab52f3aae4c: Status 404 returned error can't find the container with id 64626d37f59a3162ec2725c343fc01f7b58ab0b8424e54b35ae5eab52f3aae4c Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.793414 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm"] Dec 03 12:16:13 crc kubenswrapper[4591]: W1203 12:16:13.796207 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod480391ab_2b77_43a4_96a8_c821e57eb922.slice/crio-889b938f1f8ee6242b2c4071762dafa3a2657e8d45216e35182f9fa4d39154e5 WatchSource:0}: Error finding container 889b938f1f8ee6242b2c4071762dafa3a2657e8d45216e35182f9fa4d39154e5: Status 404 returned error can't find the container with id 889b938f1f8ee6242b2c4071762dafa3a2657e8d45216e35182f9fa4d39154e5 Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.912577 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:13 crc kubenswrapper[4591]: I1203 12:16:13.964561 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 03 12:16:13 crc kubenswrapper[4591]: W1203 12:16:13.973504 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb519e20b_91ed_48f1_b3e0_9840d35ab56e.slice/crio-e8d66c767ce58304470cb06c8ef1adc46f3c569c7970e2fb5746dca1fc46f2e0 WatchSource:0}: Error finding container e8d66c767ce58304470cb06c8ef1adc46f3c569c7970e2fb5746dca1fc46f2e0: Status 404 returned error can't find the container with id e8d66c767ce58304470cb06c8ef1adc46f3c569c7970e2fb5746dca1fc46f2e0 Dec 03 12:16:14 crc kubenswrapper[4591]: I1203 12:16:14.211852 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"b519e20b-91ed-48f1-b3e0-9840d35ab56e","Type":"ContainerStarted","Data":"e8d66c767ce58304470cb06c8ef1adc46f3c569c7970e2fb5746dca1fc46f2e0"} Dec 03 12:16:14 crc kubenswrapper[4591]: I1203 12:16:14.213618 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" event={"ID":"480391ab-2b77-43a4-96a8-c821e57eb922","Type":"ContainerStarted","Data":"889b938f1f8ee6242b2c4071762dafa3a2657e8d45216e35182f9fa4d39154e5"} Dec 03 12:16:14 crc kubenswrapper[4591]: I1203 12:16:14.214720 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" event={"ID":"8395cf21-0ee1-4760-8529-0b7be4c16b92","Type":"ContainerStarted","Data":"64626d37f59a3162ec2725c343fc01f7b58ab0b8424e54b35ae5eab52f3aae4c"} Dec 03 12:16:14 crc kubenswrapper[4591]: I1203 12:16:14.290425 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 03 12:16:14 crc kubenswrapper[4591]: W1203 12:16:14.320646 4591 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64b2bf76_1240_4b84_8881_c41e73fbe414.slice/crio-604d8978155e4193fd84c8e80c6cf920b3fcaea44f0e938e0b8d713eee66f7ad WatchSource:0}: Error finding container 604d8978155e4193fd84c8e80c6cf920b3fcaea44f0e938e0b8d713eee66f7ad: Status 404 returned error can't find the container with id 604d8978155e4193fd84c8e80c6cf920b3fcaea44f0e938e0b8d713eee66f7ad Dec 03 12:16:14 crc kubenswrapper[4591]: I1203 12:16:14.341450 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 03 12:16:15 crc kubenswrapper[4591]: I1203 12:16:15.225869 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"43b7f0c2-48eb-4736-98fe-bac9553de422","Type":"ContainerStarted","Data":"176d1361f4e49938c7d17fcf6c219be75821b96131ed245aba7ebbcddd8d857e"} Dec 03 12:16:15 crc kubenswrapper[4591]: I1203 12:16:15.227959 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"64b2bf76-1240-4b84-8881-c41e73fbe414","Type":"ContainerStarted","Data":"604d8978155e4193fd84c8e80c6cf920b3fcaea44f0e938e0b8d713eee66f7ad"} Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.243231 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"64b2bf76-1240-4b84-8881-c41e73fbe414","Type":"ContainerStarted","Data":"4f6284ce1e11e1dc58a75b6685e6d623badd154473071479b8203a741368156f"} Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.243743 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.247026 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" event={"ID":"58cfe77c-1a62-4d74-b40b-222ca7031658","Type":"ContainerStarted","Data":"9677cce42bbe15ff53018a572a561afb7e6ad2c28014d6b1567c44014bd143e4"} Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.247224 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.249687 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"b519e20b-91ed-48f1-b3e0-9840d35ab56e","Type":"ContainerStarted","Data":"c9da125abe5a7a3fc818480913b7ba77cddbf606f4332b3a00d277243c9668d1"} Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.249876 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.251679 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" event={"ID":"ca5e0135-4162-41eb-8bb0-4bbe375f13dc","Type":"ContainerStarted","Data":"f8fa10aafe54acdceb9d9339041652c787adb03babd7bc00c321bc14a871f979"} Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.251758 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.253328 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" 
event={"ID":"43b7f0c2-48eb-4736-98fe-bac9553de422","Type":"ContainerStarted","Data":"9d1b1f069db6a9612b4798f1842632da9d9b860923917ac2e885823d1a7fcd17"} Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.253777 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.255802 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" event={"ID":"836410c1-1063-427d-a270-dcd93f89dfd5","Type":"ContainerStarted","Data":"50cdd3e7151cf0d32ca318deeac3b829f0ffe620ee72ff5f4643a73e86ecf5e6"} Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.256431 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.272705 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-index-gateway-0" podStartSLOduration=3.170191037 podStartE2EDuration="4.272692054s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:14.323996269 +0000 UTC m=+671.751036039" lastFinishedPulling="2025-12-03 12:16:15.426497287 +0000 UTC m=+672.853537056" observedRunningTime="2025-12-03 12:16:16.26078539 +0000 UTC m=+673.687825160" watchObservedRunningTime="2025-12-03 12:16:16.272692054 +0000 UTC m=+673.699731824" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.285994 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" podStartSLOduration=1.7264014589999999 podStartE2EDuration="4.285973705s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:12.809954166 +0000 UTC m=+670.236993937" lastFinishedPulling="2025-12-03 12:16:15.369526414 +0000 UTC m=+672.796566183" observedRunningTime="2025-12-03 12:16:16.279677924 +0000 UTC m=+673.706717694" watchObservedRunningTime="2025-12-03 12:16:16.285973705 +0000 UTC m=+673.713013474" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.300496 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" podStartSLOduration=1.909812896 podStartE2EDuration="4.300478556s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:13.03434488 +0000 UTC m=+670.461384651" lastFinishedPulling="2025-12-03 12:16:15.425010541 +0000 UTC m=+672.852050311" observedRunningTime="2025-12-03 12:16:16.293035657 +0000 UTC m=+673.720075457" watchObservedRunningTime="2025-12-03 12:16:16.300478556 +0000 UTC m=+673.727518327" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.308681 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" podStartSLOduration=1.8841810639999999 podStartE2EDuration="4.308661336s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:12.929234734 +0000 UTC m=+670.356274504" lastFinishedPulling="2025-12-03 12:16:15.353715005 +0000 UTC m=+672.780754776" observedRunningTime="2025-12-03 12:16:16.305220685 +0000 UTC m=+673.732260455" watchObservedRunningTime="2025-12-03 12:16:16.308661336 +0000 UTC m=+673.735701106" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.326960 4591 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-logging/logging-loki-ingester-0" podStartSLOduration=2.879549414 podStartE2EDuration="4.326946017s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:13.977616183 +0000 UTC m=+671.404655953" lastFinishedPulling="2025-12-03 12:16:15.425012785 +0000 UTC m=+672.852052556" observedRunningTime="2025-12-03 12:16:16.322007589 +0000 UTC m=+673.749047359" watchObservedRunningTime="2025-12-03 12:16:16.326946017 +0000 UTC m=+673.753985787" Dec 03 12:16:16 crc kubenswrapper[4591]: I1203 12:16:16.342054 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-compactor-0" podStartSLOduration=3.280274085 podStartE2EDuration="4.342042311s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:14.362598353 +0000 UTC m=+671.789638123" lastFinishedPulling="2025-12-03 12:16:15.424366579 +0000 UTC m=+672.851406349" observedRunningTime="2025-12-03 12:16:16.337508443 +0000 UTC m=+673.764548223" watchObservedRunningTime="2025-12-03 12:16:16.342042311 +0000 UTC m=+673.769082081" Dec 03 12:16:17 crc kubenswrapper[4591]: I1203 12:16:17.266720 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" event={"ID":"480391ab-2b77-43a4-96a8-c821e57eb922","Type":"ContainerStarted","Data":"c7acff20b7373cd830fb07b80f37c861a068c4e2d77905dc347d43e44c94b327"} Dec 03 12:16:17 crc kubenswrapper[4591]: I1203 12:16:17.269507 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" event={"ID":"8395cf21-0ee1-4760-8529-0b7be4c16b92","Type":"ContainerStarted","Data":"b96fd4f7f5bea6bb2c38cd6bc0740ef9c22dcbed9e730408a6adb8daaf50fcae"} Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.287238 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" event={"ID":"8395cf21-0ee1-4760-8529-0b7be4c16b92","Type":"ContainerStarted","Data":"9f40d673f9ad7172af5f85319683f0d43e6bc4577617c7b57cf5129701dae55e"} Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.288030 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.288048 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.290663 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" event={"ID":"480391ab-2b77-43a4-96a8-c821e57eb922","Type":"ContainerStarted","Data":"fde36120588d658016ad58118a8435d7f0709e64bcec3d59c822d8c3ecca2fd3"} Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.291087 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.291160 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.295755 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.298712 4591 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.302660 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.306226 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.310355 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-5f86f97d54-cwgrl" podStartSLOduration=2.686392468 podStartE2EDuration="7.31033714s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:13.751866633 +0000 UTC m=+671.178906404" lastFinishedPulling="2025-12-03 12:16:18.375811306 +0000 UTC m=+675.802851076" observedRunningTime="2025-12-03 12:16:19.305762065 +0000 UTC m=+676.732801825" watchObservedRunningTime="2025-12-03 12:16:19.31033714 +0000 UTC m=+676.737376910" Dec 03 12:16:19 crc kubenswrapper[4591]: I1203 12:16:19.344498 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-5f86f97d54-c4dmm" podStartSLOduration=2.760301128 podStartE2EDuration="7.344476921s" podCreationTimestamp="2025-12-03 12:16:12 +0000 UTC" firstStartedPulling="2025-12-03 12:16:13.79793984 +0000 UTC m=+671.224979610" lastFinishedPulling="2025-12-03 12:16:18.382115633 +0000 UTC m=+675.809155403" observedRunningTime="2025-12-03 12:16:19.339250442 +0000 UTC m=+676.766290212" watchObservedRunningTime="2025-12-03 12:16:19.344476921 +0000 UTC m=+676.771516691" Dec 03 12:16:25 crc kubenswrapper[4591]: I1203 12:16:25.299880 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:16:25 crc kubenswrapper[4591]: I1203 12:16:25.300453 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:16:32 crc kubenswrapper[4591]: I1203 12:16:32.384694 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-distributor-76cc67bf56-9b7f9" Dec 03 12:16:32 crc kubenswrapper[4591]: I1203 12:16:32.531054 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-querier-5895d59bb8-zk2rp" Dec 03 12:16:32 crc kubenswrapper[4591]: I1203 12:16:32.612476 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-fv8vs" Dec 03 12:16:33 crc kubenswrapper[4591]: I1203 12:16:33.536747 4591 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 03 12:16:33 crc kubenswrapper[4591]: I1203 12:16:33.536819 4591 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="b519e20b-91ed-48f1-b3e0-9840d35ab56e" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:16:33 crc kubenswrapper[4591]: I1203 12:16:33.748562 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:16:33 crc kubenswrapper[4591]: I1203 12:16:33.922693 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:16:43 crc kubenswrapper[4591]: I1203 12:16:43.535527 4591 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 03 12:16:43 crc kubenswrapper[4591]: I1203 12:16:43.536294 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="b519e20b-91ed-48f1-b3e0-9840d35ab56e" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:16:53 crc kubenswrapper[4591]: I1203 12:16:53.533117 4591 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 03 12:16:53 crc kubenswrapper[4591]: I1203 12:16:53.533727 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="b519e20b-91ed-48f1-b3e0-9840d35ab56e" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:16:55 crc kubenswrapper[4591]: I1203 12:16:55.299696 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:16:55 crc kubenswrapper[4591]: I1203 12:16:55.300253 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:17:03 crc kubenswrapper[4591]: I1203 12:17:03.532867 4591 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 03 12:17:03 crc kubenswrapper[4591]: I1203 12:17:03.533455 4591 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="b519e20b-91ed-48f1-b3e0-9840d35ab56e" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:17:13 crc kubenswrapper[4591]: I1203 12:17:13.532971 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.299591 4591 patch_prober.go:28] interesting 
pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.300352 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.300405 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.301016 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d3f2548089882317f55d570c4b96ca8c02b125668122d2d842c90e0bdabc40c7"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.301091 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://d3f2548089882317f55d570c4b96ca8c02b125668122d2d842c90e0bdabc40c7" gracePeriod=600 Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.734100 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="d3f2548089882317f55d570c4b96ca8c02b125668122d2d842c90e0bdabc40c7" exitCode=0 Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.734186 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"d3f2548089882317f55d570c4b96ca8c02b125668122d2d842c90e0bdabc40c7"} Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.734359 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"5d4572b6f024b0619a91840ddb04380962109d71dc4c14f5c0eae7f89d47c431"} Dec 03 12:17:25 crc kubenswrapper[4591]: I1203 12:17:25.734389 4591 scope.go:117] "RemoveContainer" containerID="f1e201b35adc4b2f415dc45c61260eb3c7549edaa83833a201b4a8f697036247" Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.958209 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-7dmq9"] Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.959855 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-7dmq9" Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.961884 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.962436 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.962949 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.963108 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.964606 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-rbmh9" Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.969772 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-7dmq9"] Dec 03 12:17:31 crc kubenswrapper[4591]: I1203 12:17:31.979605 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.065895 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-syslog-receiver\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066236 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-metrics\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066355 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-sa-token\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066475 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e560c461-02bd-4418-a52a-ff6f2fc49a24-tmp\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066555 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-token\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066645 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/e560c461-02bd-4418-a52a-ff6f2fc49a24-datadir\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " 
pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066716 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066831 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bf52\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-kube-api-access-7bf52\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.066919 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-trusted-ca\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.067040 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config-openshift-service-cacrt\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.067167 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-entrypoint\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.105624 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-7dmq9"] Dec 03 12:17:32 crc kubenswrapper[4591]: E1203 12:17:32.106291 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[collector-syslog-receiver collector-token config config-openshift-service-cacrt datadir entrypoint kube-api-access-7bf52 metrics sa-token tmp trusted-ca], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-logging/collector-7dmq9" podUID="e560c461-02bd-4418-a52a-ff6f2fc49a24" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.168837 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-token\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169012 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e560c461-02bd-4418-a52a-ff6f2fc49a24-tmp\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169142 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: 
\"kubernetes.io/host-path/e560c461-02bd-4418-a52a-ff6f2fc49a24-datadir\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169232 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169307 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/e560c461-02bd-4418-a52a-ff6f2fc49a24-datadir\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169395 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bf52\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-kube-api-access-7bf52\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169471 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-trusted-ca\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169546 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config-openshift-service-cacrt\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169644 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-entrypoint\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169728 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-syslog-receiver\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169830 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-metrics\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.169900 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-sa-token\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: 
I1203 12:17:32.170403 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.170515 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-trusted-ca\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.170573 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config-openshift-service-cacrt\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.170796 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-entrypoint\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.179935 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-metrics\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.179978 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-syslog-receiver\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.180150 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-token\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.181548 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e560c461-02bd-4418-a52a-ff6f2fc49a24-tmp\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.184398 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-sa-token\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.185257 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bf52\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-kube-api-access-7bf52\") pod \"collector-7dmq9\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " 
pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.780351 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.790210 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-7dmq9" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.880996 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bf52\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-kube-api-access-7bf52\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881046 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-entrypoint\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881093 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-syslog-receiver\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881143 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-token\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881186 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-sa-token\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881212 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config-openshift-service-cacrt\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881284 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-trusted-ca\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881329 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-metrics\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881365 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e560c461-02bd-4418-a52a-ff6f2fc49a24-tmp\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: 
\"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881406 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881436 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/e560c461-02bd-4418-a52a-ff6f2fc49a24-datadir\") pod \"e560c461-02bd-4418-a52a-ff6f2fc49a24\" (UID: \"e560c461-02bd-4418-a52a-ff6f2fc49a24\") " Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881903 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e560c461-02bd-4418-a52a-ff6f2fc49a24-datadir" (OuterVolumeSpecName: "datadir") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "datadir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.881942 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-entrypoint" (OuterVolumeSpecName: "entrypoint") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "entrypoint". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.882395 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config" (OuterVolumeSpecName: "config") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.882496 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.882519 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config-openshift-service-cacrt" (OuterVolumeSpecName: "config-openshift-service-cacrt") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "config-openshift-service-cacrt". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.884347 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-metrics" (OuterVolumeSpecName: "metrics") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.884387 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-kube-api-access-7bf52" (OuterVolumeSpecName: "kube-api-access-7bf52") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "kube-api-access-7bf52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.884952 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-syslog-receiver" (OuterVolumeSpecName: "collector-syslog-receiver") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "collector-syslog-receiver". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.885338 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-sa-token" (OuterVolumeSpecName: "sa-token") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.885436 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-token" (OuterVolumeSpecName: "collector-token") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "collector-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.885879 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e560c461-02bd-4418-a52a-ff6f2fc49a24-tmp" (OuterVolumeSpecName: "tmp") pod "e560c461-02bd-4418-a52a-ff6f2fc49a24" (UID: "e560c461-02bd-4418-a52a-ff6f2fc49a24"). InnerVolumeSpecName "tmp". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.983958 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bf52\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-kube-api-access-7bf52\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.983991 4591 reconciler_common.go:293] "Volume detached for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-entrypoint\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984001 4591 reconciler_common.go:293] "Volume detached for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-syslog-receiver\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984010 4591 reconciler_common.go:293] "Volume detached for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-collector-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984019 4591 reconciler_common.go:293] "Volume detached for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/e560c461-02bd-4418-a52a-ff6f2fc49a24-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984030 4591 reconciler_common.go:293] "Volume detached for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config-openshift-service-cacrt\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984039 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984049 4591 reconciler_common.go:293] "Volume detached for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/e560c461-02bd-4418-a52a-ff6f2fc49a24-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984056 4591 reconciler_common.go:293] "Volume detached for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e560c461-02bd-4418-a52a-ff6f2fc49a24-tmp\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984078 4591 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e560c461-02bd-4418-a52a-ff6f2fc49a24-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:32 crc kubenswrapper[4591]: I1203 12:17:32.984088 4591 reconciler_common.go:293] "Volume detached for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/e560c461-02bd-4418-a52a-ff6f2fc49a24-datadir\") on node \"crc\" DevicePath \"\"" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.789257 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-7dmq9" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.828767 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-7dmq9"] Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.836136 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-logging/collector-7dmq9"] Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.839868 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-fz4zs"] Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.840853 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.846452 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.846767 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.847031 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.847177 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.847684 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-rbmh9" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.852712 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.863407 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-fz4zs"] Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900360 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/cca898ab-1b71-41a7-a104-9b7e584166d2-sa-token\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900433 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/cca898ab-1b71-41a7-a104-9b7e584166d2-tmp\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900484 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-trusted-ca\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900528 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf6fr\" (UniqueName: \"kubernetes.io/projected/cca898ab-1b71-41a7-a104-9b7e584166d2-kube-api-access-qf6fr\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900555 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-config-openshift-service-cacrt\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900578 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/cca898ab-1b71-41a7-a104-9b7e584166d2-datadir\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900761 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-collector-token\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900820 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-metrics\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900909 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-config\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900976 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-entrypoint\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:33 crc kubenswrapper[4591]: I1203 12:17:33.900998 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-collector-syslog-receiver\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003106 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf6fr\" (UniqueName: \"kubernetes.io/projected/cca898ab-1b71-41a7-a104-9b7e584166d2-kube-api-access-qf6fr\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003158 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-config-openshift-service-cacrt\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003188 4591 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/cca898ab-1b71-41a7-a104-9b7e584166d2-datadir\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003248 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-collector-token\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003266 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-metrics\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003295 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-config\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003320 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-entrypoint\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003338 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-collector-syslog-receiver\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003377 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/cca898ab-1b71-41a7-a104-9b7e584166d2-sa-token\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003403 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/cca898ab-1b71-41a7-a104-9b7e584166d2-tmp\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003430 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-trusted-ca\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.003435 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/cca898ab-1b71-41a7-a104-9b7e584166d2-datadir\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.004398 4591 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-config-openshift-service-cacrt\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.004559 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-config\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.004764 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-entrypoint\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.004993 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cca898ab-1b71-41a7-a104-9b7e584166d2-trusted-ca\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.008983 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/cca898ab-1b71-41a7-a104-9b7e584166d2-tmp\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.009592 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-collector-syslog-receiver\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.009637 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-metrics\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.013746 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/cca898ab-1b71-41a7-a104-9b7e584166d2-collector-token\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.016885 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/cca898ab-1b71-41a7-a104-9b7e584166d2-sa-token\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.016942 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf6fr\" (UniqueName: \"kubernetes.io/projected/cca898ab-1b71-41a7-a104-9b7e584166d2-kube-api-access-qf6fr\") pod \"collector-fz4zs\" (UID: \"cca898ab-1b71-41a7-a104-9b7e584166d2\") " pod="openshift-logging/collector-fz4zs" Dec 03 
12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.159132 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-fz4zs" Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.531740 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-fz4zs"] Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.797694 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-fz4zs" event={"ID":"cca898ab-1b71-41a7-a104-9b7e584166d2","Type":"ContainerStarted","Data":"3775d06b33364d8080ef87c2f9eb328db74c594de1618e2ed3aea23bc4abf3c7"} Dec 03 12:17:34 crc kubenswrapper[4591]: I1203 12:17:34.898725 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e560c461-02bd-4418-a52a-ff6f2fc49a24" path="/var/lib/kubelet/pods/e560c461-02bd-4418-a52a-ff6f2fc49a24/volumes" Dec 03 12:17:39 crc kubenswrapper[4591]: I1203 12:17:39.827342 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-fz4zs" event={"ID":"cca898ab-1b71-41a7-a104-9b7e584166d2","Type":"ContainerStarted","Data":"02e16b43586127d442505c8eee70be33782a16fe4d9b01283402709bbf0daedf"} Dec 03 12:17:39 crc kubenswrapper[4591]: I1203 12:17:39.848496 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-fz4zs" podStartSLOduration=1.84589007 podStartE2EDuration="6.848477056s" podCreationTimestamp="2025-12-03 12:17:33 +0000 UTC" firstStartedPulling="2025-12-03 12:17:34.540247649 +0000 UTC m=+751.967287419" lastFinishedPulling="2025-12-03 12:17:39.542834635 +0000 UTC m=+756.969874405" observedRunningTime="2025-12-03 12:17:39.84252325 +0000 UTC m=+757.269563020" watchObservedRunningTime="2025-12-03 12:17:39.848477056 +0000 UTC m=+757.275516826" Dec 03 12:17:39 crc kubenswrapper[4591]: I1203 12:17:39.869522 4591 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 12:18:04 crc kubenswrapper[4591]: I1203 12:18:04.944738 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2"] Dec 03 12:18:04 crc kubenswrapper[4591]: I1203 12:18:04.948354 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:04 crc kubenswrapper[4591]: I1203 12:18:04.953385 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:18:04 crc kubenswrapper[4591]: I1203 12:18:04.959925 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2"] Dec 03 12:18:04 crc kubenswrapper[4591]: I1203 12:18:04.985128 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:04 crc kubenswrapper[4591]: I1203 12:18:04.985249 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:04 crc kubenswrapper[4591]: I1203 12:18:04.985354 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph424\" (UniqueName: \"kubernetes.io/projected/fa097088-c6b3-427e-a0fb-def2a61e6640-kube-api-access-ph424\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.086907 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.087244 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph424\" (UniqueName: \"kubernetes.io/projected/fa097088-c6b3-427e-a0fb-def2a61e6640-kube-api-access-ph424\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.087309 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.087571 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.087710 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.102585 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph424\" (UniqueName: \"kubernetes.io/projected/fa097088-c6b3-427e-a0fb-def2a61e6640-kube-api-access-ph424\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.270095 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.633366 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2"] Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.987968 4591 generic.go:334] "Generic (PLEG): container finished" podID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerID="6aa4dc57e82432767ecb78b3af3c958a7587bb5d4d86467fecbe4515b04d5232" exitCode=0 Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.988026 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" event={"ID":"fa097088-c6b3-427e-a0fb-def2a61e6640","Type":"ContainerDied","Data":"6aa4dc57e82432767ecb78b3af3c958a7587bb5d4d86467fecbe4515b04d5232"} Dec 03 12:18:05 crc kubenswrapper[4591]: I1203 12:18:05.988086 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" event={"ID":"fa097088-c6b3-427e-a0fb-def2a61e6640","Type":"ContainerStarted","Data":"4e9c24f6102d6f8cd6b13c7b95b9bb20d2b7d1cc9c619e3e724622e1c44c0016"} Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.303010 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m7trv"] Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.304983 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.309261 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m7trv"] Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.321635 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-catalog-content\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.321861 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z95gl\" (UniqueName: \"kubernetes.io/projected/08382f15-14ef-4a7b-b94f-78d12aaef41f-kube-api-access-z95gl\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.321942 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-utilities\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.423226 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z95gl\" (UniqueName: \"kubernetes.io/projected/08382f15-14ef-4a7b-b94f-78d12aaef41f-kube-api-access-z95gl\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.423484 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-utilities\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.423525 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-catalog-content\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.423888 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-utilities\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.423961 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-catalog-content\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.442427 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-z95gl\" (UniqueName: \"kubernetes.io/projected/08382f15-14ef-4a7b-b94f-78d12aaef41f-kube-api-access-z95gl\") pod \"redhat-operators-m7trv\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:07 crc kubenswrapper[4591]: I1203 12:18:07.626660 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:08 crc kubenswrapper[4591]: I1203 12:18:08.000450 4591 generic.go:334] "Generic (PLEG): container finished" podID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerID="47bc42b6995ce925047b9a57a5e0cb84a9b844f108d800fcfb06a3f12de6dbac" exitCode=0 Dec 03 12:18:08 crc kubenswrapper[4591]: I1203 12:18:08.000514 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" event={"ID":"fa097088-c6b3-427e-a0fb-def2a61e6640","Type":"ContainerDied","Data":"47bc42b6995ce925047b9a57a5e0cb84a9b844f108d800fcfb06a3f12de6dbac"} Dec 03 12:18:08 crc kubenswrapper[4591]: I1203 12:18:08.028779 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m7trv"] Dec 03 12:18:08 crc kubenswrapper[4591]: W1203 12:18:08.035590 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08382f15_14ef_4a7b_b94f_78d12aaef41f.slice/crio-b0428e510d8170df90c10639ea2f6b2091667ccc1a599cde660ba6584605637a WatchSource:0}: Error finding container b0428e510d8170df90c10639ea2f6b2091667ccc1a599cde660ba6584605637a: Status 404 returned error can't find the container with id b0428e510d8170df90c10639ea2f6b2091667ccc1a599cde660ba6584605637a Dec 03 12:18:09 crc kubenswrapper[4591]: I1203 12:18:09.008146 4591 generic.go:334] "Generic (PLEG): container finished" podID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerID="e1bd9c3adc829c99a7f937f52e607262ccdbe1e70815111a0935c680a28a3c47" exitCode=0 Dec 03 12:18:09 crc kubenswrapper[4591]: I1203 12:18:09.008218 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" event={"ID":"fa097088-c6b3-427e-a0fb-def2a61e6640","Type":"ContainerDied","Data":"e1bd9c3adc829c99a7f937f52e607262ccdbe1e70815111a0935c680a28a3c47"} Dec 03 12:18:09 crc kubenswrapper[4591]: I1203 12:18:09.010092 4591 generic.go:334] "Generic (PLEG): container finished" podID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerID="de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63" exitCode=0 Dec 03 12:18:09 crc kubenswrapper[4591]: I1203 12:18:09.010140 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7trv" event={"ID":"08382f15-14ef-4a7b-b94f-78d12aaef41f","Type":"ContainerDied","Data":"de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63"} Dec 03 12:18:09 crc kubenswrapper[4591]: I1203 12:18:09.010167 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7trv" event={"ID":"08382f15-14ef-4a7b-b94f-78d12aaef41f","Type":"ContainerStarted","Data":"b0428e510d8170df90c10639ea2f6b2091667ccc1a599cde660ba6584605637a"} Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.018000 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7trv" 
event={"ID":"08382f15-14ef-4a7b-b94f-78d12aaef41f","Type":"ContainerStarted","Data":"bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164"} Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.320252 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.371671 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph424\" (UniqueName: \"kubernetes.io/projected/fa097088-c6b3-427e-a0fb-def2a61e6640-kube-api-access-ph424\") pod \"fa097088-c6b3-427e-a0fb-def2a61e6640\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.371726 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-util\") pod \"fa097088-c6b3-427e-a0fb-def2a61e6640\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.371803 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-bundle\") pod \"fa097088-c6b3-427e-a0fb-def2a61e6640\" (UID: \"fa097088-c6b3-427e-a0fb-def2a61e6640\") " Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.372352 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-bundle" (OuterVolumeSpecName: "bundle") pod "fa097088-c6b3-427e-a0fb-def2a61e6640" (UID: "fa097088-c6b3-427e-a0fb-def2a61e6640"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.378561 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa097088-c6b3-427e-a0fb-def2a61e6640-kube-api-access-ph424" (OuterVolumeSpecName: "kube-api-access-ph424") pod "fa097088-c6b3-427e-a0fb-def2a61e6640" (UID: "fa097088-c6b3-427e-a0fb-def2a61e6640"). InnerVolumeSpecName "kube-api-access-ph424". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.381589 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-util" (OuterVolumeSpecName: "util") pod "fa097088-c6b3-427e-a0fb-def2a61e6640" (UID: "fa097088-c6b3-427e-a0fb-def2a61e6640"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.473663 4591 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.473930 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph424\" (UniqueName: \"kubernetes.io/projected/fa097088-c6b3-427e-a0fb-def2a61e6640-kube-api-access-ph424\") on node \"crc\" DevicePath \"\"" Dec 03 12:18:10 crc kubenswrapper[4591]: I1203 12:18:10.473943 4591 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fa097088-c6b3-427e-a0fb-def2a61e6640-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:18:11 crc kubenswrapper[4591]: I1203 12:18:11.025900 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" event={"ID":"fa097088-c6b3-427e-a0fb-def2a61e6640","Type":"ContainerDied","Data":"4e9c24f6102d6f8cd6b13c7b95b9bb20d2b7d1cc9c619e3e724622e1c44c0016"} Dec 03 12:18:11 crc kubenswrapper[4591]: I1203 12:18:11.025937 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2" Dec 03 12:18:11 crc kubenswrapper[4591]: I1203 12:18:11.025946 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e9c24f6102d6f8cd6b13c7b95b9bb20d2b7d1cc9c619e3e724622e1c44c0016" Dec 03 12:18:11 crc kubenswrapper[4591]: I1203 12:18:11.028173 4591 generic.go:334] "Generic (PLEG): container finished" podID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerID="bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164" exitCode=0 Dec 03 12:18:11 crc kubenswrapper[4591]: I1203 12:18:11.028226 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7trv" event={"ID":"08382f15-14ef-4a7b-b94f-78d12aaef41f","Type":"ContainerDied","Data":"bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164"} Dec 03 12:18:12 crc kubenswrapper[4591]: I1203 12:18:12.035939 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7trv" event={"ID":"08382f15-14ef-4a7b-b94f-78d12aaef41f","Type":"ContainerStarted","Data":"5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4"} Dec 03 12:18:12 crc kubenswrapper[4591]: I1203 12:18:12.047972 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m7trv" podStartSLOduration=2.549306924 podStartE2EDuration="5.047953539s" podCreationTimestamp="2025-12-03 12:18:07 +0000 UTC" firstStartedPulling="2025-12-03 12:18:09.011595949 +0000 UTC m=+786.438635719" lastFinishedPulling="2025-12-03 12:18:11.510242563 +0000 UTC m=+788.937282334" observedRunningTime="2025-12-03 12:18:12.047603781 +0000 UTC m=+789.474643551" watchObservedRunningTime="2025-12-03 12:18:12.047953539 +0000 UTC m=+789.474993309" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.720860 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c"] Dec 03 12:18:14 crc kubenswrapper[4591]: E1203 12:18:14.721417 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerName="extract" Dec 03 
12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.721430 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerName="extract" Dec 03 12:18:14 crc kubenswrapper[4591]: E1203 12:18:14.721443 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerName="util" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.721449 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerName="util" Dec 03 12:18:14 crc kubenswrapper[4591]: E1203 12:18:14.721468 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerName="pull" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.721474 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerName="pull" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.721588 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa097088-c6b3-427e-a0fb-def2a61e6640" containerName="extract" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.722126 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.723928 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-5m842" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.724635 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.724782 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.742680 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c"] Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.743313 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d8x8\" (UniqueName: \"kubernetes.io/projected/e44b18b0-0553-438b-b325-1d86e705999e-kube-api-access-5d8x8\") pod \"nmstate-operator-5b5b58f5c8-ffk7c\" (UID: \"e44b18b0-0553-438b-b325-1d86e705999e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.844918 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d8x8\" (UniqueName: \"kubernetes.io/projected/e44b18b0-0553-438b-b325-1d86e705999e-kube-api-access-5d8x8\") pod \"nmstate-operator-5b5b58f5c8-ffk7c\" (UID: \"e44b18b0-0553-438b-b325-1d86e705999e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" Dec 03 12:18:14 crc kubenswrapper[4591]: I1203 12:18:14.862156 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d8x8\" (UniqueName: \"kubernetes.io/projected/e44b18b0-0553-438b-b325-1d86e705999e-kube-api-access-5d8x8\") pod \"nmstate-operator-5b5b58f5c8-ffk7c\" (UID: \"e44b18b0-0553-438b-b325-1d86e705999e\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" Dec 03 12:18:15 crc kubenswrapper[4591]: I1203 12:18:15.050669 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" Dec 03 12:18:15 crc kubenswrapper[4591]: I1203 12:18:15.473548 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c"] Dec 03 12:18:15 crc kubenswrapper[4591]: W1203 12:18:15.475880 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode44b18b0_0553_438b_b325_1d86e705999e.slice/crio-e4b7c90dc64bad3e167b55a3007f410c732b4eea9185c1fae32160f0b5b8de60 WatchSource:0}: Error finding container e4b7c90dc64bad3e167b55a3007f410c732b4eea9185c1fae32160f0b5b8de60: Status 404 returned error can't find the container with id e4b7c90dc64bad3e167b55a3007f410c732b4eea9185c1fae32160f0b5b8de60 Dec 03 12:18:16 crc kubenswrapper[4591]: I1203 12:18:16.058568 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" event={"ID":"e44b18b0-0553-438b-b325-1d86e705999e","Type":"ContainerStarted","Data":"e4b7c90dc64bad3e167b55a3007f410c732b4eea9185c1fae32160f0b5b8de60"} Dec 03 12:18:17 crc kubenswrapper[4591]: I1203 12:18:17.627180 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:17 crc kubenswrapper[4591]: I1203 12:18:17.627457 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:17 crc kubenswrapper[4591]: I1203 12:18:17.676132 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:18 crc kubenswrapper[4591]: I1203 12:18:18.071701 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" event={"ID":"e44b18b0-0553-438b-b325-1d86e705999e","Type":"ContainerStarted","Data":"2231fb696e0c54783078e3570cb9c710c237890776a4f47c61c5245e8623bce0"} Dec 03 12:18:18 crc kubenswrapper[4591]: I1203 12:18:18.085937 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ffk7c" podStartSLOduration=1.978158251 podStartE2EDuration="4.085918028s" podCreationTimestamp="2025-12-03 12:18:14 +0000 UTC" firstStartedPulling="2025-12-03 12:18:15.477997982 +0000 UTC m=+792.905037752" lastFinishedPulling="2025-12-03 12:18:17.58575776 +0000 UTC m=+795.012797529" observedRunningTime="2025-12-03 12:18:18.082240975 +0000 UTC m=+795.509280744" watchObservedRunningTime="2025-12-03 12:18:18.085918028 +0000 UTC m=+795.512957799" Dec 03 12:18:18 crc kubenswrapper[4591]: I1203 12:18:18.108485 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.093009 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m7trv"] Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.093456 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m7trv" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="registry-server" containerID="cri-o://5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4" gracePeriod=2 Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.408183 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.441460 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-utilities\") pod \"08382f15-14ef-4a7b-b94f-78d12aaef41f\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.441552 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z95gl\" (UniqueName: \"kubernetes.io/projected/08382f15-14ef-4a7b-b94f-78d12aaef41f-kube-api-access-z95gl\") pod \"08382f15-14ef-4a7b-b94f-78d12aaef41f\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.441610 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-catalog-content\") pod \"08382f15-14ef-4a7b-b94f-78d12aaef41f\" (UID: \"08382f15-14ef-4a7b-b94f-78d12aaef41f\") " Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.443286 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-utilities" (OuterVolumeSpecName: "utilities") pod "08382f15-14ef-4a7b-b94f-78d12aaef41f" (UID: "08382f15-14ef-4a7b-b94f-78d12aaef41f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.446439 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08382f15-14ef-4a7b-b94f-78d12aaef41f-kube-api-access-z95gl" (OuterVolumeSpecName: "kube-api-access-z95gl") pod "08382f15-14ef-4a7b-b94f-78d12aaef41f" (UID: "08382f15-14ef-4a7b-b94f-78d12aaef41f"). InnerVolumeSpecName "kube-api-access-z95gl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.543518 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:18:20 crc kubenswrapper[4591]: I1203 12:18:20.543552 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z95gl\" (UniqueName: \"kubernetes.io/projected/08382f15-14ef-4a7b-b94f-78d12aaef41f-kube-api-access-z95gl\") on node \"crc\" DevicePath \"\"" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.090944 4591 generic.go:334] "Generic (PLEG): container finished" podID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerID="5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4" exitCode=0 Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.090994 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7trv" event={"ID":"08382f15-14ef-4a7b-b94f-78d12aaef41f","Type":"ContainerDied","Data":"5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4"} Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.091016 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m7trv" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.091043 4591 scope.go:117] "RemoveContainer" containerID="5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.091029 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m7trv" event={"ID":"08382f15-14ef-4a7b-b94f-78d12aaef41f","Type":"ContainerDied","Data":"b0428e510d8170df90c10639ea2f6b2091667ccc1a599cde660ba6584605637a"} Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.104575 4591 scope.go:117] "RemoveContainer" containerID="bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.119823 4591 scope.go:117] "RemoveContainer" containerID="de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.135217 4591 scope.go:117] "RemoveContainer" containerID="5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4" Dec 03 12:18:21 crc kubenswrapper[4591]: E1203 12:18:21.135821 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4\": container with ID starting with 5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4 not found: ID does not exist" containerID="5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.135923 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4"} err="failed to get container status \"5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4\": rpc error: code = NotFound desc = could not find container \"5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4\": container with ID starting with 5f06df21ee95558d7a35118143f3dc36cfbdc0eddee7c0c1ca13fd4fa40458d4 not found: ID does not exist" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.136003 4591 scope.go:117] "RemoveContainer" containerID="bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164" Dec 03 12:18:21 crc kubenswrapper[4591]: E1203 12:18:21.136379 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164\": container with ID starting with bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164 not found: ID does not exist" containerID="bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.136413 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164"} err="failed to get container status \"bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164\": rpc error: code = NotFound desc = could not find container \"bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164\": container with ID starting with bda94962aee49170d19cd9a5c2a71fb22a1f51296136902426ae3b68919f4164 not found: ID does not exist" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.136435 4591 scope.go:117] "RemoveContainer" 
containerID="de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63" Dec 03 12:18:21 crc kubenswrapper[4591]: E1203 12:18:21.136699 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63\": container with ID starting with de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63 not found: ID does not exist" containerID="de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.136736 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63"} err="failed to get container status \"de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63\": rpc error: code = NotFound desc = could not find container \"de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63\": container with ID starting with de8abe7c19bf1e8e5cad38e661eaf7e31c05c746c901a72f45a7aac765ba6c63 not found: ID does not exist" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.225233 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08382f15-14ef-4a7b-b94f-78d12aaef41f" (UID: "08382f15-14ef-4a7b-b94f-78d12aaef41f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.258379 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08382f15-14ef-4a7b-b94f-78d12aaef41f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.416621 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m7trv"] Dec 03 12:18:21 crc kubenswrapper[4591]: I1203 12:18:21.420826 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m7trv"] Dec 03 12:18:22 crc kubenswrapper[4591]: I1203 12:18:22.899538 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" path="/var/lib/kubelet/pods/08382f15-14ef-4a7b-b94f-78d12aaef41f/volumes" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.794294 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft"] Dec 03 12:18:25 crc kubenswrapper[4591]: E1203 12:18:25.794840 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="registry-server" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.794856 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="registry-server" Dec 03 12:18:25 crc kubenswrapper[4591]: E1203 12:18:25.794866 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="extract-content" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.794872 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="extract-content" Dec 03 12:18:25 crc kubenswrapper[4591]: E1203 12:18:25.795139 4591 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="extract-utilities" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.795147 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="extract-utilities" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.795301 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="08382f15-14ef-4a7b-b94f-78d12aaef41f" containerName="registry-server" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.796008 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.810911 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-q2s4c" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.816001 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9"] Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.816808 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.820579 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.828042 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft"] Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.832965 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2fxf\" (UniqueName: \"kubernetes.io/projected/5f031f8c-04a6-4523-9f1a-a70e06900dc1-kube-api-access-r2fxf\") pod \"nmstate-metrics-7f946cbc9-kjjft\" (UID: \"5f031f8c-04a6-4523-9f1a-a70e06900dc1\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.833675 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-542md"] Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.834766 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.846857 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9"] Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.934706 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-dbus-socket\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.934775 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph7bv\" (UniqueName: \"kubernetes.io/projected/64362553-8965-4c23-82f4-fddf7acbb589-kube-api-access-ph7bv\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.934838 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kkdq9\" (UID: \"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.934945 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-ovs-socket\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.935113 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-nmstate-lock\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.935149 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2fxf\" (UniqueName: \"kubernetes.io/projected/5f031f8c-04a6-4523-9f1a-a70e06900dc1-kube-api-access-r2fxf\") pod \"nmstate-metrics-7f946cbc9-kjjft\" (UID: \"5f031f8c-04a6-4523-9f1a-a70e06900dc1\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.935230 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9hjx\" (UniqueName: \"kubernetes.io/projected/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-kube-api-access-w9hjx\") pod \"nmstate-webhook-5f6d4c5ccb-kkdq9\" (UID: \"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.963955 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2fxf\" (UniqueName: \"kubernetes.io/projected/5f031f8c-04a6-4523-9f1a-a70e06900dc1-kube-api-access-r2fxf\") pod \"nmstate-metrics-7f946cbc9-kjjft\" (UID: \"5f031f8c-04a6-4523-9f1a-a70e06900dc1\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" Dec 03 12:18:25 crc 
kubenswrapper[4591]: I1203 12:18:25.986336 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg"] Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.987541 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.989947 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-7d88s" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.990131 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 12:18:25 crc kubenswrapper[4591]: I1203 12:18:25.990277 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.009471 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg"] Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.036929 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9hjx\" (UniqueName: \"kubernetes.io/projected/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-kube-api-access-w9hjx\") pod \"nmstate-webhook-5f6d4c5ccb-kkdq9\" (UID: \"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.036985 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-dbus-socket\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037012 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lbbv\" (UniqueName: \"kubernetes.io/projected/d50528f6-5b01-45a7-8cc7-3bc7044a3769-kube-api-access-2lbbv\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037043 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph7bv\" (UniqueName: \"kubernetes.io/projected/64362553-8965-4c23-82f4-fddf7acbb589-kube-api-access-ph7bv\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037077 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kkdq9\" (UID: \"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037108 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/d50528f6-5b01-45a7-8cc7-3bc7044a3769-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 
12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037128 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-ovs-socket\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037180 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/d50528f6-5b01-45a7-8cc7-3bc7044a3769-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037220 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-nmstate-lock\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037304 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-nmstate-lock\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037355 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-ovs-socket\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: E1203 12:18:26.037355 4591 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 03 12:18:26 crc kubenswrapper[4591]: E1203 12:18:26.037416 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-tls-key-pair podName:c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c nodeName:}" failed. No retries permitted until 2025-12-03 12:18:26.537398175 +0000 UTC m=+803.964437945 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-kkdq9" (UID: "c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c") : secret "openshift-nmstate-webhook" not found Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.037568 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/64362553-8965-4c23-82f4-fddf7acbb589-dbus-socket\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.064434 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph7bv\" (UniqueName: \"kubernetes.io/projected/64362553-8965-4c23-82f4-fddf7acbb589-kube-api-access-ph7bv\") pod \"nmstate-handler-542md\" (UID: \"64362553-8965-4c23-82f4-fddf7acbb589\") " pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.077662 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9hjx\" (UniqueName: \"kubernetes.io/projected/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-kube-api-access-w9hjx\") pod \"nmstate-webhook-5f6d4c5ccb-kkdq9\" (UID: \"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.114188 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.138967 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lbbv\" (UniqueName: \"kubernetes.io/projected/d50528f6-5b01-45a7-8cc7-3bc7044a3769-kube-api-access-2lbbv\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.139592 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/d50528f6-5b01-45a7-8cc7-3bc7044a3769-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.140388 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/d50528f6-5b01-45a7-8cc7-3bc7044a3769-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.141288 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/d50528f6-5b01-45a7-8cc7-3bc7044a3769-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.143029 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d50528f6-5b01-45a7-8cc7-3bc7044a3769-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.148235 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-57cb8dcd97-krzdz"] Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.149142 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.149245 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.166568 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lbbv\" (UniqueName: \"kubernetes.io/projected/d50528f6-5b01-45a7-8cc7-3bc7044a3769-kube-api-access-2lbbv\") pod \"nmstate-console-plugin-7fbb5f6569-phtsg\" (UID: \"d50528f6-5b01-45a7-8cc7-3bc7044a3769\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.187115 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-57cb8dcd97-krzdz"] Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.243084 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-service-ca\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.243141 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-serving-cert\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.243163 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7rqc\" (UniqueName: \"kubernetes.io/projected/c3b978ba-dc24-4767-ba25-16b0ae39164a-kube-api-access-w7rqc\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.243180 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-config\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.243199 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-trusted-ca-bundle\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.243230 4591 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-oauth-config\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.243250 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-oauth-serving-cert\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.308024 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.344639 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-trusted-ca-bundle\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.344693 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-oauth-config\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.344728 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-oauth-serving-cert\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.344842 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-service-ca\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.344892 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-serving-cert\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.344909 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7rqc\" (UniqueName: \"kubernetes.io/projected/c3b978ba-dc24-4767-ba25-16b0ae39164a-kube-api-access-w7rqc\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.344925 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-config\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.345908 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-config\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.345960 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-trusted-ca-bundle\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.346528 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-service-ca\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.347049 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c3b978ba-dc24-4767-ba25-16b0ae39164a-oauth-serving-cert\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.351575 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-serving-cert\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.351816 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c3b978ba-dc24-4767-ba25-16b0ae39164a-console-oauth-config\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.365588 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7rqc\" (UniqueName: \"kubernetes.io/projected/c3b978ba-dc24-4767-ba25-16b0ae39164a-kube-api-access-w7rqc\") pod \"console-57cb8dcd97-krzdz\" (UID: \"c3b978ba-dc24-4767-ba25-16b0ae39164a\") " pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.516316 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.547624 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kkdq9\" (UID: \"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.551888 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft"] Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.552557 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-kkdq9\" (UID: \"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:26 crc kubenswrapper[4591]: W1203 12:18:26.554736 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f031f8c_04a6_4523_9f1a_a70e06900dc1.slice/crio-e44598a150905780eae63e14daa59df68f0afd74a3e7e0e1d45768effd5c4d17 WatchSource:0}: Error finding container e44598a150905780eae63e14daa59df68f0afd74a3e7e0e1d45768effd5c4d17: Status 404 returned error can't find the container with id e44598a150905780eae63e14daa59df68f0afd74a3e7e0e1d45768effd5c4d17 Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.698551 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg"] Dec 03 12:18:26 crc kubenswrapper[4591]: W1203 12:18:26.702028 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd50528f6_5b01_45a7_8cc7_3bc7044a3769.slice/crio-233347f50826bbcdfd82a4fab96e386314add6e38f22ddf2c21b5047be907d93 WatchSource:0}: Error finding container 233347f50826bbcdfd82a4fab96e386314add6e38f22ddf2c21b5047be907d93: Status 404 returned error can't find the container with id 233347f50826bbcdfd82a4fab96e386314add6e38f22ddf2c21b5047be907d93 Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.729588 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:26 crc kubenswrapper[4591]: I1203 12:18:26.878016 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-57cb8dcd97-krzdz"] Dec 03 12:18:26 crc kubenswrapper[4591]: W1203 12:18:26.882595 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3b978ba_dc24_4767_ba25_16b0ae39164a.slice/crio-4e0460c83cf63580ff28a02467349c998fe98d635923c27f326fb48c3d9ab07e WatchSource:0}: Error finding container 4e0460c83cf63580ff28a02467349c998fe98d635923c27f326fb48c3d9ab07e: Status 404 returned error can't find the container with id 4e0460c83cf63580ff28a02467349c998fe98d635923c27f326fb48c3d9ab07e Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.092123 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9"] Dec 03 12:18:27 crc kubenswrapper[4591]: W1203 12:18:27.098401 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6152ed7_d2a1_47dd_a9dc_c208f72b7f0c.slice/crio-44a939a6a4a4d25a5d1f32d01e78e870b0d4ea3bdc823a890d99b99ca69887dc WatchSource:0}: Error finding container 44a939a6a4a4d25a5d1f32d01e78e870b0d4ea3bdc823a890d99b99ca69887dc: Status 404 returned error can't find the container with id 44a939a6a4a4d25a5d1f32d01e78e870b0d4ea3bdc823a890d99b99ca69887dc Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.142352 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-542md" event={"ID":"64362553-8965-4c23-82f4-fddf7acbb589","Type":"ContainerStarted","Data":"8867a5c90ecaaa4a56684a23707ad19b06add9e96082313a0ed4388a6f9599f0"} Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.143933 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" event={"ID":"5f031f8c-04a6-4523-9f1a-a70e06900dc1","Type":"ContainerStarted","Data":"e44598a150905780eae63e14daa59df68f0afd74a3e7e0e1d45768effd5c4d17"} Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.145740 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-57cb8dcd97-krzdz" event={"ID":"c3b978ba-dc24-4767-ba25-16b0ae39164a","Type":"ContainerStarted","Data":"908600e4b55a77d413ec897b283afffa389a9ae159267beb40e2bfe0d83832af"} Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.145796 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-57cb8dcd97-krzdz" event={"ID":"c3b978ba-dc24-4767-ba25-16b0ae39164a","Type":"ContainerStarted","Data":"4e0460c83cf63580ff28a02467349c998fe98d635923c27f326fb48c3d9ab07e"} Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.146912 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" event={"ID":"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c","Type":"ContainerStarted","Data":"44a939a6a4a4d25a5d1f32d01e78e870b0d4ea3bdc823a890d99b99ca69887dc"} Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.148631 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" event={"ID":"d50528f6-5b01-45a7-8cc7-3bc7044a3769","Type":"ContainerStarted","Data":"233347f50826bbcdfd82a4fab96e386314add6e38f22ddf2c21b5047be907d93"} Dec 03 12:18:27 crc kubenswrapper[4591]: I1203 12:18:27.163565 4591 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-console/console-57cb8dcd97-krzdz" podStartSLOduration=1.163552097 podStartE2EDuration="1.163552097s" podCreationTimestamp="2025-12-03 12:18:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:18:27.159975562 +0000 UTC m=+804.587015332" watchObservedRunningTime="2025-12-03 12:18:27.163552097 +0000 UTC m=+804.590591868" Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.174171 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" event={"ID":"5f031f8c-04a6-4523-9f1a-a70e06900dc1","Type":"ContainerStarted","Data":"096227301b6e709bc3e351f0659e13e2c263c71daa0c1a3ffea51d2c39b544b6"} Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.175867 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" event={"ID":"c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c","Type":"ContainerStarted","Data":"f2d1c6dadc3af25ce65161c9afa6ba4ca73dc79dd690bfc358ecae92a7b38cf7"} Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.176239 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.177358 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" event={"ID":"d50528f6-5b01-45a7-8cc7-3bc7044a3769","Type":"ContainerStarted","Data":"bc08b7d9196669b3df4ecaa63cd82b7e8e14416716b48e2dcc53896ab9191695"} Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.179046 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-542md" event={"ID":"64362553-8965-4c23-82f4-fddf7acbb589","Type":"ContainerStarted","Data":"3e8b24e58018ba62dcf423fc028589335d49e514051e6fb90149cd01a1495538"} Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.179380 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.208599 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" podStartSLOduration=3.129262139 podStartE2EDuration="5.208586715s" podCreationTimestamp="2025-12-03 12:18:25 +0000 UTC" firstStartedPulling="2025-12-03 12:18:27.109371599 +0000 UTC m=+804.536411369" lastFinishedPulling="2025-12-03 12:18:29.188696174 +0000 UTC m=+806.615735945" observedRunningTime="2025-12-03 12:18:30.2019314 +0000 UTC m=+807.628971170" watchObservedRunningTime="2025-12-03 12:18:30.208586715 +0000 UTC m=+807.635626475" Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.223681 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-phtsg" podStartSLOduration=2.743508516 podStartE2EDuration="5.223660692s" podCreationTimestamp="2025-12-03 12:18:25 +0000 UTC" firstStartedPulling="2025-12-03 12:18:26.70418983 +0000 UTC m=+804.131229601" lastFinishedPulling="2025-12-03 12:18:29.184342007 +0000 UTC m=+806.611381777" observedRunningTime="2025-12-03 12:18:30.22159069 +0000 UTC m=+807.648630460" watchObservedRunningTime="2025-12-03 12:18:30.223660692 +0000 UTC m=+807.650700463" Dec 03 12:18:30 crc kubenswrapper[4591]: I1203 12:18:30.236617 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-handler-542md" podStartSLOduration=2.243038025 podStartE2EDuration="5.236604334s" podCreationTimestamp="2025-12-03 12:18:25 +0000 UTC" firstStartedPulling="2025-12-03 12:18:26.19896708 +0000 UTC m=+803.626006851" lastFinishedPulling="2025-12-03 12:18:29.19253339 +0000 UTC m=+806.619573160" observedRunningTime="2025-12-03 12:18:30.232499144 +0000 UTC m=+807.659538914" watchObservedRunningTime="2025-12-03 12:18:30.236604334 +0000 UTC m=+807.663644103" Dec 03 12:18:32 crc kubenswrapper[4591]: I1203 12:18:32.192872 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" event={"ID":"5f031f8c-04a6-4523-9f1a-a70e06900dc1","Type":"ContainerStarted","Data":"4092c7d1c1ad97f95f924703fd8c089112c53627a46b1d3aebb9efc2b709fef2"} Dec 03 12:18:36 crc kubenswrapper[4591]: I1203 12:18:36.170151 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-542md" Dec 03 12:18:36 crc kubenswrapper[4591]: I1203 12:18:36.184125 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-kjjft" podStartSLOduration=6.464324526 podStartE2EDuration="11.184105634s" podCreationTimestamp="2025-12-03 12:18:25 +0000 UTC" firstStartedPulling="2025-12-03 12:18:26.557466953 +0000 UTC m=+803.984506723" lastFinishedPulling="2025-12-03 12:18:31.277248062 +0000 UTC m=+808.704287831" observedRunningTime="2025-12-03 12:18:32.21331524 +0000 UTC m=+809.640355010" watchObservedRunningTime="2025-12-03 12:18:36.184105634 +0000 UTC m=+813.611145404" Dec 03 12:18:36 crc kubenswrapper[4591]: I1203 12:18:36.517229 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:36 crc kubenswrapper[4591]: I1203 12:18:36.517348 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:36 crc kubenswrapper[4591]: I1203 12:18:36.522437 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:37 crc kubenswrapper[4591]: I1203 12:18:37.230623 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-57cb8dcd97-krzdz" Dec 03 12:18:37 crc kubenswrapper[4591]: I1203 12:18:37.276737 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-7c96666cff-9fpl8"] Dec 03 12:18:46 crc kubenswrapper[4591]: I1203 12:18:46.734564 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-kkdq9" Dec 03 12:19:00 crc kubenswrapper[4591]: I1203 12:19:00.995343 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq"] Dec 03 12:19:00 crc kubenswrapper[4591]: I1203 12:19:00.997260 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.000216 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.003417 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq"] Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.094891 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.094945 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.095164 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrqgd\" (UniqueName: \"kubernetes.io/projected/111df2c3-a57c-412d-b593-390f211cc05c-kube-api-access-vrqgd\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.196982 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrqgd\" (UniqueName: \"kubernetes.io/projected/111df2c3-a57c-412d-b593-390f211cc05c-kube-api-access-vrqgd\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.197101 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.197127 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.197554 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.197596 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.214089 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrqgd\" (UniqueName: \"kubernetes.io/projected/111df2c3-a57c-412d-b593-390f211cc05c-kube-api-access-vrqgd\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.317085 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:01 crc kubenswrapper[4591]: I1203 12:19:01.683811 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq"] Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.309155 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-7c96666cff-9fpl8" podUID="ef80fee5-f895-4f71-a44b-13172da0afd2" containerName="console" containerID="cri-o://5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134" gracePeriod=15 Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.395650 4591 generic.go:334] "Generic (PLEG): container finished" podID="111df2c3-a57c-412d-b593-390f211cc05c" containerID="e8e5ff8c7a959fba656fa6ff79bb363c092b2dcf64d1bd8b5c52dacccf0b91d7" exitCode=0 Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.395729 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" event={"ID":"111df2c3-a57c-412d-b593-390f211cc05c","Type":"ContainerDied","Data":"e8e5ff8c7a959fba656fa6ff79bb363c092b2dcf64d1bd8b5c52dacccf0b91d7"} Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.395983 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" event={"ID":"111df2c3-a57c-412d-b593-390f211cc05c","Type":"ContainerStarted","Data":"4ff81cc163ae972d95810199ad57ade78c6423f2cb3710127843bff4b8eed95f"} Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.397671 4591 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.655256 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-7c96666cff-9fpl8_ef80fee5-f895-4f71-a44b-13172da0afd2/console/0.log" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.655348 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.824155 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-oauth-config\") pod \"ef80fee5-f895-4f71-a44b-13172da0afd2\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.824202 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-service-ca\") pod \"ef80fee5-f895-4f71-a44b-13172da0afd2\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.824235 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-oauth-serving-cert\") pod \"ef80fee5-f895-4f71-a44b-13172da0afd2\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.824313 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8v2k\" (UniqueName: \"kubernetes.io/projected/ef80fee5-f895-4f71-a44b-13172da0afd2-kube-api-access-g8v2k\") pod \"ef80fee5-f895-4f71-a44b-13172da0afd2\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.824359 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-trusted-ca-bundle\") pod \"ef80fee5-f895-4f71-a44b-13172da0afd2\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.824465 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-console-config\") pod \"ef80fee5-f895-4f71-a44b-13172da0afd2\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.824540 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-serving-cert\") pod \"ef80fee5-f895-4f71-a44b-13172da0afd2\" (UID: \"ef80fee5-f895-4f71-a44b-13172da0afd2\") " Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.825366 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ef80fee5-f895-4f71-a44b-13172da0afd2" (UID: "ef80fee5-f895-4f71-a44b-13172da0afd2"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.825464 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-console-config" (OuterVolumeSpecName: "console-config") pod "ef80fee5-f895-4f71-a44b-13172da0afd2" (UID: "ef80fee5-f895-4f71-a44b-13172da0afd2"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.825704 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-service-ca" (OuterVolumeSpecName: "service-ca") pod "ef80fee5-f895-4f71-a44b-13172da0afd2" (UID: "ef80fee5-f895-4f71-a44b-13172da0afd2"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.825744 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ef80fee5-f895-4f71-a44b-13172da0afd2" (UID: "ef80fee5-f895-4f71-a44b-13172da0afd2"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.830141 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ef80fee5-f895-4f71-a44b-13172da0afd2" (UID: "ef80fee5-f895-4f71-a44b-13172da0afd2"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.830907 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ef80fee5-f895-4f71-a44b-13172da0afd2" (UID: "ef80fee5-f895-4f71-a44b-13172da0afd2"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.831008 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef80fee5-f895-4f71-a44b-13172da0afd2-kube-api-access-g8v2k" (OuterVolumeSpecName: "kube-api-access-g8v2k") pod "ef80fee5-f895-4f71-a44b-13172da0afd2" (UID: "ef80fee5-f895-4f71-a44b-13172da0afd2"). InnerVolumeSpecName "kube-api-access-g8v2k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.926506 4591 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.926558 4591 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.926568 4591 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.926578 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8v2k\" (UniqueName: \"kubernetes.io/projected/ef80fee5-f895-4f71-a44b-13172da0afd2-kube-api-access-g8v2k\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.926589 4591 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.926598 4591 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef80fee5-f895-4f71-a44b-13172da0afd2-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:02 crc kubenswrapper[4591]: I1203 12:19:02.926607 4591 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef80fee5-f895-4f71-a44b-13172da0afd2-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.403274 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-7c96666cff-9fpl8_ef80fee5-f895-4f71-a44b-13172da0afd2/console/0.log" Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.403548 4591 generic.go:334] "Generic (PLEG): container finished" podID="ef80fee5-f895-4f71-a44b-13172da0afd2" containerID="5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134" exitCode=2 Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.403579 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c96666cff-9fpl8" event={"ID":"ef80fee5-f895-4f71-a44b-13172da0afd2","Type":"ContainerDied","Data":"5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134"} Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.403613 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c96666cff-9fpl8" event={"ID":"ef80fee5-f895-4f71-a44b-13172da0afd2","Type":"ContainerDied","Data":"39aabeea8c46a724a11dc2f66eab5563c0b4d12991411f94ef9c39c20b1df576"} Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.403655 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7c96666cff-9fpl8" Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.403654 4591 scope.go:117] "RemoveContainer" containerID="5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134" Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.421373 4591 scope.go:117] "RemoveContainer" containerID="5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134" Dec 03 12:19:03 crc kubenswrapper[4591]: E1203 12:19:03.421715 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134\": container with ID starting with 5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134 not found: ID does not exist" containerID="5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134" Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.421753 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134"} err="failed to get container status \"5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134\": rpc error: code = NotFound desc = could not find container \"5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134\": container with ID starting with 5b0c54a4efe382072f8c1d759f0530bc5c821d97adb378126267f37348186134 not found: ID does not exist" Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.422929 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-7c96666cff-9fpl8"] Dec 03 12:19:03 crc kubenswrapper[4591]: I1203 12:19:03.426602 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-7c96666cff-9fpl8"] Dec 03 12:19:04 crc kubenswrapper[4591]: I1203 12:19:04.412102 4591 generic.go:334] "Generic (PLEG): container finished" podID="111df2c3-a57c-412d-b593-390f211cc05c" containerID="021e0006886a05c94823d9da962f2635a613a662f19d5400d3152adee7c95340" exitCode=0 Dec 03 12:19:04 crc kubenswrapper[4591]: I1203 12:19:04.412162 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" event={"ID":"111df2c3-a57c-412d-b593-390f211cc05c","Type":"ContainerDied","Data":"021e0006886a05c94823d9da962f2635a613a662f19d5400d3152adee7c95340"} Dec 03 12:19:04 crc kubenswrapper[4591]: I1203 12:19:04.898143 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef80fee5-f895-4f71-a44b-13172da0afd2" path="/var/lib/kubelet/pods/ef80fee5-f895-4f71-a44b-13172da0afd2/volumes" Dec 03 12:19:05 crc kubenswrapper[4591]: I1203 12:19:05.420122 4591 generic.go:334] "Generic (PLEG): container finished" podID="111df2c3-a57c-412d-b593-390f211cc05c" containerID="d84554c0fe897bb54b0323acc71515193a714a721cd00cc1808865e8fe29bf72" exitCode=0 Dec 03 12:19:05 crc kubenswrapper[4591]: I1203 12:19:05.420172 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" event={"ID":"111df2c3-a57c-412d-b593-390f211cc05c","Type":"ContainerDied","Data":"d84554c0fe897bb54b0323acc71515193a714a721cd00cc1808865e8fe29bf72"} Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.657654 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.792454 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-util\") pod \"111df2c3-a57c-412d-b593-390f211cc05c\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.792898 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-bundle\") pod \"111df2c3-a57c-412d-b593-390f211cc05c\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.792971 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrqgd\" (UniqueName: \"kubernetes.io/projected/111df2c3-a57c-412d-b593-390f211cc05c-kube-api-access-vrqgd\") pod \"111df2c3-a57c-412d-b593-390f211cc05c\" (UID: \"111df2c3-a57c-412d-b593-390f211cc05c\") " Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.793739 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-bundle" (OuterVolumeSpecName: "bundle") pod "111df2c3-a57c-412d-b593-390f211cc05c" (UID: "111df2c3-a57c-412d-b593-390f211cc05c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.798732 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/111df2c3-a57c-412d-b593-390f211cc05c-kube-api-access-vrqgd" (OuterVolumeSpecName: "kube-api-access-vrqgd") pod "111df2c3-a57c-412d-b593-390f211cc05c" (UID: "111df2c3-a57c-412d-b593-390f211cc05c"). InnerVolumeSpecName "kube-api-access-vrqgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.802932 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-util" (OuterVolumeSpecName: "util") pod "111df2c3-a57c-412d-b593-390f211cc05c" (UID: "111df2c3-a57c-412d-b593-390f211cc05c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.894971 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrqgd\" (UniqueName: \"kubernetes.io/projected/111df2c3-a57c-412d-b593-390f211cc05c-kube-api-access-vrqgd\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.895017 4591 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:06 crc kubenswrapper[4591]: I1203 12:19:06.895028 4591 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/111df2c3-a57c-412d-b593-390f211cc05c-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:19:07 crc kubenswrapper[4591]: I1203 12:19:07.434121 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" event={"ID":"111df2c3-a57c-412d-b593-390f211cc05c","Type":"ContainerDied","Data":"4ff81cc163ae972d95810199ad57ade78c6423f2cb3710127843bff4b8eed95f"} Dec 03 12:19:07 crc kubenswrapper[4591]: I1203 12:19:07.434176 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ff81cc163ae972d95810199ad57ade78c6423f2cb3710127843bff4b8eed95f" Dec 03 12:19:07 crc kubenswrapper[4591]: I1203 12:19:07.434189 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.919186 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v"] Dec 03 12:19:16 crc kubenswrapper[4591]: E1203 12:19:16.919992 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef80fee5-f895-4f71-a44b-13172da0afd2" containerName="console" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.920006 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef80fee5-f895-4f71-a44b-13172da0afd2" containerName="console" Dec 03 12:19:16 crc kubenswrapper[4591]: E1203 12:19:16.920022 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="111df2c3-a57c-412d-b593-390f211cc05c" containerName="util" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.920028 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="111df2c3-a57c-412d-b593-390f211cc05c" containerName="util" Dec 03 12:19:16 crc kubenswrapper[4591]: E1203 12:19:16.920036 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="111df2c3-a57c-412d-b593-390f211cc05c" containerName="extract" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.920041 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="111df2c3-a57c-412d-b593-390f211cc05c" containerName="extract" Dec 03 12:19:16 crc kubenswrapper[4591]: E1203 12:19:16.920057 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="111df2c3-a57c-412d-b593-390f211cc05c" containerName="pull" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.920077 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="111df2c3-a57c-412d-b593-390f211cc05c" containerName="pull" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.920216 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef80fee5-f895-4f71-a44b-13172da0afd2" containerName="console" Dec 
03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.920244 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="111df2c3-a57c-412d-b593-390f211cc05c" containerName="extract" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.920742 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.927561 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.927776 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.928345 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.928587 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.929700 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-cmfzl" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.942525 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v"] Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.962339 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-apiservice-cert\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.962395 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76dqm\" (UniqueName: \"kubernetes.io/projected/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-kube-api-access-76dqm\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:16 crc kubenswrapper[4591]: I1203 12:19:16.962558 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-webhook-cert\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.064453 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-webhook-cert\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.064649 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-apiservice-cert\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.064719 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76dqm\" (UniqueName: \"kubernetes.io/projected/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-kube-api-access-76dqm\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.071094 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-apiservice-cert\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.071139 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-webhook-cert\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.080720 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76dqm\" (UniqueName: \"kubernetes.io/projected/89a2cfa8-cba7-4315-8f4a-dc73986ea26b-kube-api-access-76dqm\") pod \"metallb-operator-controller-manager-56654d8dc6-cw82v\" (UID: \"89a2cfa8-cba7-4315-8f4a-dc73986ea26b\") " pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.160127 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn"] Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.161204 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.163026 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-rlsxz" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.163772 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.164385 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.174947 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn"] Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.239455 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.268129 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/35810136-753c-4212-ba53-251ecd811b4a-webhook-cert\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.268394 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k4fs\" (UniqueName: \"kubernetes.io/projected/35810136-753c-4212-ba53-251ecd811b4a-kube-api-access-7k4fs\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.268808 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/35810136-753c-4212-ba53-251ecd811b4a-apiservice-cert\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.370221 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k4fs\" (UniqueName: \"kubernetes.io/projected/35810136-753c-4212-ba53-251ecd811b4a-kube-api-access-7k4fs\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.372287 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/35810136-753c-4212-ba53-251ecd811b4a-apiservice-cert\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.372334 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/35810136-753c-4212-ba53-251ecd811b4a-webhook-cert\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.377398 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/35810136-753c-4212-ba53-251ecd811b4a-apiservice-cert\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.377589 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/35810136-753c-4212-ba53-251ecd811b4a-webhook-cert\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " 
pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.406729 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k4fs\" (UniqueName: \"kubernetes.io/projected/35810136-753c-4212-ba53-251ecd811b4a-kube-api-access-7k4fs\") pod \"metallb-operator-webhook-server-c9cc967c7-kgksn\" (UID: \"35810136-753c-4212-ba53-251ecd811b4a\") " pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.475942 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.649054 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v"] Dec 03 12:19:17 crc kubenswrapper[4591]: W1203 12:19:17.650500 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89a2cfa8_cba7_4315_8f4a_dc73986ea26b.slice/crio-4953fa71f7cf5952f9047a7d941e75244e2257600393d347efda5d6f0248af8c WatchSource:0}: Error finding container 4953fa71f7cf5952f9047a7d941e75244e2257600393d347efda5d6f0248af8c: Status 404 returned error can't find the container with id 4953fa71f7cf5952f9047a7d941e75244e2257600393d347efda5d6f0248af8c Dec 03 12:19:17 crc kubenswrapper[4591]: I1203 12:19:17.894901 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn"] Dec 03 12:19:18 crc kubenswrapper[4591]: I1203 12:19:18.510516 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" event={"ID":"35810136-753c-4212-ba53-251ecd811b4a","Type":"ContainerStarted","Data":"4fbeb4cebe341c730fcc76a320883d05800face1aa42c6f4002d160031923e07"} Dec 03 12:19:18 crc kubenswrapper[4591]: I1203 12:19:18.512657 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" event={"ID":"89a2cfa8-cba7-4315-8f4a-dc73986ea26b","Type":"ContainerStarted","Data":"4953fa71f7cf5952f9047a7d941e75244e2257600393d347efda5d6f0248af8c"} Dec 03 12:19:22 crc kubenswrapper[4591]: I1203 12:19:22.544327 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" event={"ID":"35810136-753c-4212-ba53-251ecd811b4a","Type":"ContainerStarted","Data":"6d73a710c31ed14d133ecc5a0c61bb28eff35b54cd0e6bc11f7ab8eea8412b2b"} Dec 03 12:19:22 crc kubenswrapper[4591]: I1203 12:19:22.545817 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:22 crc kubenswrapper[4591]: I1203 12:19:22.546150 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" event={"ID":"89a2cfa8-cba7-4315-8f4a-dc73986ea26b","Type":"ContainerStarted","Data":"62542b3cfd6011ed30be038cdf9d2899d097b349caa06bdfb4bd2d9baaf9230e"} Dec 03 12:19:22 crc kubenswrapper[4591]: I1203 12:19:22.546220 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:22 crc kubenswrapper[4591]: I1203 12:19:22.560033 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" podStartSLOduration=1.962975312 podStartE2EDuration="5.560020392s" podCreationTimestamp="2025-12-03 12:19:17 +0000 UTC" firstStartedPulling="2025-12-03 12:19:17.898259349 +0000 UTC m=+855.325299118" lastFinishedPulling="2025-12-03 12:19:21.495304428 +0000 UTC m=+858.922344198" observedRunningTime="2025-12-03 12:19:22.558004561 +0000 UTC m=+859.985044331" watchObservedRunningTime="2025-12-03 12:19:22.560020392 +0000 UTC m=+859.987060161" Dec 03 12:19:22 crc kubenswrapper[4591]: I1203 12:19:22.574341 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" podStartSLOduration=2.74577752 podStartE2EDuration="6.574328388s" podCreationTimestamp="2025-12-03 12:19:16 +0000 UTC" firstStartedPulling="2025-12-03 12:19:17.660224753 +0000 UTC m=+855.087264522" lastFinishedPulling="2025-12-03 12:19:21.488775621 +0000 UTC m=+858.915815390" observedRunningTime="2025-12-03 12:19:22.573978631 +0000 UTC m=+860.001018401" watchObservedRunningTime="2025-12-03 12:19:22.574328388 +0000 UTC m=+860.001368159" Dec 03 12:19:25 crc kubenswrapper[4591]: I1203 12:19:25.300000 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:19:25 crc kubenswrapper[4591]: I1203 12:19:25.300442 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:19:37 crc kubenswrapper[4591]: I1203 12:19:37.480621 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-c9cc967c7-kgksn" Dec 03 12:19:49 crc kubenswrapper[4591]: I1203 12:19:49.811468 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l7kft"] Dec 03 12:19:49 crc kubenswrapper[4591]: I1203 12:19:49.813201 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:49 crc kubenswrapper[4591]: I1203 12:19:49.818288 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l7kft"] Dec 03 12:19:49 crc kubenswrapper[4591]: I1203 12:19:49.953550 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdq9f\" (UniqueName: \"kubernetes.io/projected/44da941a-421d-4d48-ad3e-fd6fc9a5733e-kube-api-access-vdq9f\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:49 crc kubenswrapper[4591]: I1203 12:19:49.953607 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-utilities\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:49 crc kubenswrapper[4591]: I1203 12:19:49.953807 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-catalog-content\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.055498 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdq9f\" (UniqueName: \"kubernetes.io/projected/44da941a-421d-4d48-ad3e-fd6fc9a5733e-kube-api-access-vdq9f\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.055564 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-utilities\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.055697 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-catalog-content\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.056124 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-catalog-content\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.056417 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-utilities\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.072289 4591 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vdq9f\" (UniqueName: \"kubernetes.io/projected/44da941a-421d-4d48-ad3e-fd6fc9a5733e-kube-api-access-vdq9f\") pod \"certified-operators-l7kft\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.129690 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.560399 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l7kft"] Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.742952 4591 generic.go:334] "Generic (PLEG): container finished" podID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerID="e8742dc42de67d22aebb125753a67c8c57fe96ec985f3babd8bdccd6a91fd2b8" exitCode=0 Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.742997 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7kft" event={"ID":"44da941a-421d-4d48-ad3e-fd6fc9a5733e","Type":"ContainerDied","Data":"e8742dc42de67d22aebb125753a67c8c57fe96ec985f3babd8bdccd6a91fd2b8"} Dec 03 12:19:50 crc kubenswrapper[4591]: I1203 12:19:50.743026 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7kft" event={"ID":"44da941a-421d-4d48-ad3e-fd6fc9a5733e","Type":"ContainerStarted","Data":"55f8e4d228552580eb3dd2beede7056df068d4f344f17ff6555c982635404a78"} Dec 03 12:19:51 crc kubenswrapper[4591]: I1203 12:19:51.751404 4591 generic.go:334] "Generic (PLEG): container finished" podID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerID="5af892f1028ad7cd77a8c1a743cd0ac9f9fc0566b141102eff08bdc947db43f1" exitCode=0 Dec 03 12:19:51 crc kubenswrapper[4591]: I1203 12:19:51.751507 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7kft" event={"ID":"44da941a-421d-4d48-ad3e-fd6fc9a5733e","Type":"ContainerDied","Data":"5af892f1028ad7cd77a8c1a743cd0ac9f9fc0566b141102eff08bdc947db43f1"} Dec 03 12:19:52 crc kubenswrapper[4591]: I1203 12:19:52.760339 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7kft" event={"ID":"44da941a-421d-4d48-ad3e-fd6fc9a5733e","Type":"ContainerStarted","Data":"cef21809b589e26a7781a389ff6a6fb2099bdcd18483c05006e526cd11c19af3"} Dec 03 12:19:52 crc kubenswrapper[4591]: I1203 12:19:52.775705 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l7kft" podStartSLOduration=2.230347347 podStartE2EDuration="3.775688421s" podCreationTimestamp="2025-12-03 12:19:49 +0000 UTC" firstStartedPulling="2025-12-03 12:19:50.744792736 +0000 UTC m=+888.171832506" lastFinishedPulling="2025-12-03 12:19:52.290133811 +0000 UTC m=+889.717173580" observedRunningTime="2025-12-03 12:19:52.775107188 +0000 UTC m=+890.202146958" watchObservedRunningTime="2025-12-03 12:19:52.775688421 +0000 UTC m=+890.202728191" Dec 03 12:19:55 crc kubenswrapper[4591]: I1203 12:19:55.299725 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:19:55 crc kubenswrapper[4591]: I1203 12:19:55.299790 4591 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.242793 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-56654d8dc6-cw82v" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.812108 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-czlhw"] Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.814815 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.816039 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.816108 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-tl2n6" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.816333 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.821780 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4"] Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.822430 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.825006 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.830524 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4"] Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.879220 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-sk4bx"] Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.880775 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-sk4bx" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.882434 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.882604 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.882807 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-6sf2r" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.883629 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-t29t8"] Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.884738 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.886709 4591 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.887169 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.903896 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-t29t8"] Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.989989 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n78dw\" (UniqueName: \"kubernetes.io/projected/18d17b40-9915-4646-a939-0c23134df87e-kube-api-access-n78dw\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990052 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/18d17b40-9915-4646-a939-0c23134df87e-metallb-excludel2\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990158 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-reloader\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990235 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-metrics-certs\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990325 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p55w5\" (UniqueName: \"kubernetes.io/projected/9bf70e0c-4a12-4965-85c9-212b85622dc0-kube-api-access-p55w5\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990364 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppxpz\" (UniqueName: \"kubernetes.io/projected/102945b3-6f0d-43b1-b347-7a7fd637ce88-kube-api-access-ppxpz\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990398 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-sockets\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990453 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-metrics\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990509 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09e7c06e-c8f7-42c8-81c1-dda14b5d609e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-r82b4\" (UID: \"09e7c06e-c8f7-42c8-81c1-dda14b5d609e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990609 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/102945b3-6f0d-43b1-b347-7a7fd637ce88-metrics-certs\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990665 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bf70e0c-4a12-4965-85c9-212b85622dc0-metrics-certs\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990786 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp7f9\" (UniqueName: \"kubernetes.io/projected/09e7c06e-c8f7-42c8-81c1-dda14b5d609e-kube-api-access-wp7f9\") pod \"frr-k8s-webhook-server-7fcb986d4-r82b4\" (UID: \"09e7c06e-c8f7-42c8-81c1-dda14b5d609e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990838 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-startup\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990856 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990882 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-conf\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:57 crc kubenswrapper[4591]: I1203 12:19:57.990903 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/102945b3-6f0d-43b1-b347-7a7fd637ce88-cert\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092319 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp7f9\" (UniqueName: 
\"kubernetes.io/projected/09e7c06e-c8f7-42c8-81c1-dda14b5d609e-kube-api-access-wp7f9\") pod \"frr-k8s-webhook-server-7fcb986d4-r82b4\" (UID: \"09e7c06e-c8f7-42c8-81c1-dda14b5d609e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092369 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-startup\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092395 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092419 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-conf\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092443 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/102945b3-6f0d-43b1-b347-7a7fd637ce88-cert\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092483 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n78dw\" (UniqueName: \"kubernetes.io/projected/18d17b40-9915-4646-a939-0c23134df87e-kube-api-access-n78dw\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092508 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/18d17b40-9915-4646-a939-0c23134df87e-metallb-excludel2\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092529 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-reloader\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: E1203 12:19:58.092529 4591 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092549 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-metrics-certs\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092581 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p55w5\" (UniqueName: 
\"kubernetes.io/projected/9bf70e0c-4a12-4965-85c9-212b85622dc0-kube-api-access-p55w5\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: E1203 12:19:58.092597 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist podName:18d17b40-9915-4646-a939-0c23134df87e nodeName:}" failed. No retries permitted until 2025-12-03 12:19:58.592581615 +0000 UTC m=+896.019621375 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist") pod "speaker-sk4bx" (UID: "18d17b40-9915-4646-a939-0c23134df87e") : secret "metallb-memberlist" not found Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092627 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppxpz\" (UniqueName: \"kubernetes.io/projected/102945b3-6f0d-43b1-b347-7a7fd637ce88-kube-api-access-ppxpz\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092698 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-sockets\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092741 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-metrics\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092773 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09e7c06e-c8f7-42c8-81c1-dda14b5d609e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-r82b4\" (UID: \"09e7c06e-c8f7-42c8-81c1-dda14b5d609e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092854 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/102945b3-6f0d-43b1-b347-7a7fd637ce88-metrics-certs\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092868 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bf70e0c-4a12-4965-85c9-212b85622dc0-metrics-certs\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.092902 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-conf\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.093389 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-reloader\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.093464 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-metrics\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.093483 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-startup\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.093818 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/18d17b40-9915-4646-a939-0c23134df87e-metallb-excludel2\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.094645 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9bf70e0c-4a12-4965-85c9-212b85622dc0-frr-sockets\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.098572 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bf70e0c-4a12-4965-85c9-212b85622dc0-metrics-certs\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.098677 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/102945b3-6f0d-43b1-b347-7a7fd637ce88-cert\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.098868 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09e7c06e-c8f7-42c8-81c1-dda14b5d609e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-r82b4\" (UID: \"09e7c06e-c8f7-42c8-81c1-dda14b5d609e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.099084 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/102945b3-6f0d-43b1-b347-7a7fd637ce88-metrics-certs\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.099337 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-metrics-certs\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.109618 4591 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ppxpz\" (UniqueName: \"kubernetes.io/projected/102945b3-6f0d-43b1-b347-7a7fd637ce88-kube-api-access-ppxpz\") pod \"controller-f8648f98b-t29t8\" (UID: \"102945b3-6f0d-43b1-b347-7a7fd637ce88\") " pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.109942 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp7f9\" (UniqueName: \"kubernetes.io/projected/09e7c06e-c8f7-42c8-81c1-dda14b5d609e-kube-api-access-wp7f9\") pod \"frr-k8s-webhook-server-7fcb986d4-r82b4\" (UID: \"09e7c06e-c8f7-42c8-81c1-dda14b5d609e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.110967 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p55w5\" (UniqueName: \"kubernetes.io/projected/9bf70e0c-4a12-4965-85c9-212b85622dc0-kube-api-access-p55w5\") pod \"frr-k8s-czlhw\" (UID: \"9bf70e0c-4a12-4965-85c9-212b85622dc0\") " pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.112317 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n78dw\" (UniqueName: \"kubernetes.io/projected/18d17b40-9915-4646-a939-0c23134df87e-kube-api-access-n78dw\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.130279 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-czlhw" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.134754 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.200389 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.519946 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4"] Dec 03 12:19:58 crc kubenswrapper[4591]: W1203 12:19:58.521632 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09e7c06e_c8f7_42c8_81c1_dda14b5d609e.slice/crio-690789eab3e7394cad07f75927aa06ea6d7132a58e24caa0a2fde965df4805b8 WatchSource:0}: Error finding container 690789eab3e7394cad07f75927aa06ea6d7132a58e24caa0a2fde965df4805b8: Status 404 returned error can't find the container with id 690789eab3e7394cad07f75927aa06ea6d7132a58e24caa0a2fde965df4805b8 Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.589264 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-t29t8"] Dec 03 12:19:58 crc kubenswrapper[4591]: W1203 12:19:58.590886 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod102945b3_6f0d_43b1_b347_7a7fd637ce88.slice/crio-8c7d62031eae5b25728e40f42dcb88b111c37db5055e9134a136b257917f01be WatchSource:0}: Error finding container 8c7d62031eae5b25728e40f42dcb88b111c37db5055e9134a136b257917f01be: Status 404 returned error can't find the container with id 8c7d62031eae5b25728e40f42dcb88b111c37db5055e9134a136b257917f01be Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.603311 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:58 crc kubenswrapper[4591]: E1203 12:19:58.603445 4591 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 12:19:58 crc kubenswrapper[4591]: E1203 12:19:58.603495 4591 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist podName:18d17b40-9915-4646-a939-0c23134df87e nodeName:}" failed. No retries permitted until 2025-12-03 12:19:59.603482741 +0000 UTC m=+897.030522511 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist") pod "speaker-sk4bx" (UID: "18d17b40-9915-4646-a939-0c23134df87e") : secret "metallb-memberlist" not found Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.798332 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" event={"ID":"09e7c06e-c8f7-42c8-81c1-dda14b5d609e","Type":"ContainerStarted","Data":"690789eab3e7394cad07f75927aa06ea6d7132a58e24caa0a2fde965df4805b8"} Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.799392 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerStarted","Data":"d912caa03e49fa6f17c5e4f8e23468d0190536413e48f4a1c7b7880eaa34083e"} Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.800812 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-t29t8" event={"ID":"102945b3-6f0d-43b1-b347-7a7fd637ce88","Type":"ContainerStarted","Data":"883de9c6d40daa5d71e579650edf4829f4fdbf0ea722a5bb888ee819dd6938f1"} Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.800839 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-t29t8" event={"ID":"102945b3-6f0d-43b1-b347-7a7fd637ce88","Type":"ContainerStarted","Data":"d83bed7481a87b83324a7e3308b7ad5f00dfd87b5b9519f011fafb00871f1da2"} Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.800850 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-t29t8" event={"ID":"102945b3-6f0d-43b1-b347-7a7fd637ce88","Type":"ContainerStarted","Data":"8c7d62031eae5b25728e40f42dcb88b111c37db5055e9134a136b257917f01be"} Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.800945 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:19:58 crc kubenswrapper[4591]: I1203 12:19:58.814665 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-t29t8" podStartSLOduration=1.8146540899999999 podStartE2EDuration="1.81465409s" podCreationTimestamp="2025-12-03 12:19:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:19:58.811265207 +0000 UTC m=+896.238304978" watchObservedRunningTime="2025-12-03 12:19:58.81465409 +0000 UTC m=+896.241693861" Dec 03 12:19:59 crc kubenswrapper[4591]: I1203 12:19:59.617362 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:59 crc kubenswrapper[4591]: I1203 12:19:59.622184 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/18d17b40-9915-4646-a939-0c23134df87e-memberlist\") pod \"speaker-sk4bx\" (UID: \"18d17b40-9915-4646-a939-0c23134df87e\") " pod="metallb-system/speaker-sk4bx" Dec 03 12:19:59 crc kubenswrapper[4591]: I1203 12:19:59.693783 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-sk4bx" Dec 03 12:19:59 crc kubenswrapper[4591]: W1203 12:19:59.721205 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18d17b40_9915_4646_a939_0c23134df87e.slice/crio-edf4b9f4126ba50e3b21e3b11a384ac76ee0d1097461e9e4ec28b0f5860851be WatchSource:0}: Error finding container edf4b9f4126ba50e3b21e3b11a384ac76ee0d1097461e9e4ec28b0f5860851be: Status 404 returned error can't find the container with id edf4b9f4126ba50e3b21e3b11a384ac76ee0d1097461e9e4ec28b0f5860851be Dec 03 12:19:59 crc kubenswrapper[4591]: I1203 12:19:59.808766 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-sk4bx" event={"ID":"18d17b40-9915-4646-a939-0c23134df87e","Type":"ContainerStarted","Data":"edf4b9f4126ba50e3b21e3b11a384ac76ee0d1097461e9e4ec28b0f5860851be"} Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.129894 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.130149 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.161788 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.817280 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-sk4bx" event={"ID":"18d17b40-9915-4646-a939-0c23134df87e","Type":"ContainerStarted","Data":"ea5743f29a56fdafcdd2d11f4d94568d6b73f84505cda31e0c9b491952fa8e74"} Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.817338 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-sk4bx" event={"ID":"18d17b40-9915-4646-a939-0c23134df87e","Type":"ContainerStarted","Data":"a7f738d1dfaca8d4a1a18e50bb107a0460e3a87bd2e6a0dedb1507ae67ee7917"} Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.817575 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-sk4bx" Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.835905 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-sk4bx" podStartSLOduration=3.835887586 podStartE2EDuration="3.835887586s" podCreationTimestamp="2025-12-03 12:19:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:20:00.82979074 +0000 UTC m=+898.256830510" watchObservedRunningTime="2025-12-03 12:20:00.835887586 +0000 UTC m=+898.262927355" Dec 03 12:20:00 crc kubenswrapper[4591]: I1203 12:20:00.870738 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:20:02 crc kubenswrapper[4591]: I1203 12:20:02.405394 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l7kft"] Dec 03 12:20:02 crc kubenswrapper[4591]: I1203 12:20:02.831099 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l7kft" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="registry-server" containerID="cri-o://cef21809b589e26a7781a389ff6a6fb2099bdcd18483c05006e526cd11c19af3" gracePeriod=2 Dec 03 
12:20:03 crc kubenswrapper[4591]: I1203 12:20:03.840629 4591 generic.go:334] "Generic (PLEG): container finished" podID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerID="cef21809b589e26a7781a389ff6a6fb2099bdcd18483c05006e526cd11c19af3" exitCode=0 Dec 03 12:20:03 crc kubenswrapper[4591]: I1203 12:20:03.840679 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7kft" event={"ID":"44da941a-421d-4d48-ad3e-fd6fc9a5733e","Type":"ContainerDied","Data":"cef21809b589e26a7781a389ff6a6fb2099bdcd18483c05006e526cd11c19af3"} Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.083773 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.149120 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdq9f\" (UniqueName: \"kubernetes.io/projected/44da941a-421d-4d48-ad3e-fd6fc9a5733e-kube-api-access-vdq9f\") pod \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.149258 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-catalog-content\") pod \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.149418 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-utilities\") pod \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\" (UID: \"44da941a-421d-4d48-ad3e-fd6fc9a5733e\") " Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.150222 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-utilities" (OuterVolumeSpecName: "utilities") pod "44da941a-421d-4d48-ad3e-fd6fc9a5733e" (UID: "44da941a-421d-4d48-ad3e-fd6fc9a5733e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.155021 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44da941a-421d-4d48-ad3e-fd6fc9a5733e-kube-api-access-vdq9f" (OuterVolumeSpecName: "kube-api-access-vdq9f") pod "44da941a-421d-4d48-ad3e-fd6fc9a5733e" (UID: "44da941a-421d-4d48-ad3e-fd6fc9a5733e"). InnerVolumeSpecName "kube-api-access-vdq9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.183966 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "44da941a-421d-4d48-ad3e-fd6fc9a5733e" (UID: "44da941a-421d-4d48-ad3e-fd6fc9a5733e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.250929 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdq9f\" (UniqueName: \"kubernetes.io/projected/44da941a-421d-4d48-ad3e-fd6fc9a5733e-kube-api-access-vdq9f\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.250972 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.250983 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44da941a-421d-4d48-ad3e-fd6fc9a5733e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.611822 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-884jm"] Dec 03 12:20:05 crc kubenswrapper[4591]: E1203 12:20:05.612119 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="extract-content" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.612132 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="extract-content" Dec 03 12:20:05 crc kubenswrapper[4591]: E1203 12:20:05.612159 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="extract-utilities" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.612166 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="extract-utilities" Dec 03 12:20:05 crc kubenswrapper[4591]: E1203 12:20:05.612193 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="registry-server" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.612200 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="registry-server" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.612328 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" containerName="registry-server" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.613449 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.620258 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-884jm"] Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.656598 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-utilities\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.656646 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-catalog-content\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.656796 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb67s\" (UniqueName: \"kubernetes.io/projected/809d1a94-3e0e-487c-a47a-c4197c505bfb-kube-api-access-nb67s\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.759035 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb67s\" (UniqueName: \"kubernetes.io/projected/809d1a94-3e0e-487c-a47a-c4197c505bfb-kube-api-access-nb67s\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.759296 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-utilities\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.759356 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-catalog-content\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.759774 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-utilities\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.759844 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-catalog-content\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.778434 4591 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nb67s\" (UniqueName: \"kubernetes.io/projected/809d1a94-3e0e-487c-a47a-c4197c505bfb-kube-api-access-nb67s\") pod \"redhat-marketplace-884jm\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.857473 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l7kft" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.857520 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l7kft" event={"ID":"44da941a-421d-4d48-ad3e-fd6fc9a5733e","Type":"ContainerDied","Data":"55f8e4d228552580eb3dd2beede7056df068d4f344f17ff6555c982635404a78"} Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.857581 4591 scope.go:117] "RemoveContainer" containerID="cef21809b589e26a7781a389ff6a6fb2099bdcd18483c05006e526cd11c19af3" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.858970 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" event={"ID":"09e7c06e-c8f7-42c8-81c1-dda14b5d609e","Type":"ContainerStarted","Data":"cabf62defa5ecc9f0ffabd2d97080c2905e260fc37e7d6e58a30217ae4b8152e"} Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.859107 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.860953 4591 generic.go:334] "Generic (PLEG): container finished" podID="9bf70e0c-4a12-4965-85c9-212b85622dc0" containerID="8f5f21708a20bcda8baddbf6853c812c1cb69935731dc8aa8368a94d00feb8e9" exitCode=0 Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.860991 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerDied","Data":"8f5f21708a20bcda8baddbf6853c812c1cb69935731dc8aa8368a94d00feb8e9"} Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.894187 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" podStartSLOduration=2.543799085 podStartE2EDuration="8.894171209s" podCreationTimestamp="2025-12-03 12:19:57 +0000 UTC" firstStartedPulling="2025-12-03 12:19:58.523486775 +0000 UTC m=+895.950526545" lastFinishedPulling="2025-12-03 12:20:04.873858909 +0000 UTC m=+902.300898669" observedRunningTime="2025-12-03 12:20:05.891728486 +0000 UTC m=+903.318768257" watchObservedRunningTime="2025-12-03 12:20:05.894171209 +0000 UTC m=+903.321210980" Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.906288 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l7kft"] Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.910101 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l7kft"] Dec 03 12:20:05 crc kubenswrapper[4591]: I1203 12:20:05.925716 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:06 crc kubenswrapper[4591]: I1203 12:20:06.388192 4591 scope.go:117] "RemoveContainer" containerID="5af892f1028ad7cd77a8c1a743cd0ac9f9fc0566b141102eff08bdc947db43f1" Dec 03 12:20:06 crc kubenswrapper[4591]: I1203 12:20:06.411573 4591 scope.go:117] "RemoveContainer" containerID="e8742dc42de67d22aebb125753a67c8c57fe96ec985f3babd8bdccd6a91fd2b8" Dec 03 12:20:06 crc kubenswrapper[4591]: I1203 12:20:06.768478 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-884jm"] Dec 03 12:20:06 crc kubenswrapper[4591]: W1203 12:20:06.772244 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod809d1a94_3e0e_487c_a47a_c4197c505bfb.slice/crio-9d74d51c5d6cd24e62967c011e049a24a4667a10eec600aa699bbfb5b29359bd WatchSource:0}: Error finding container 9d74d51c5d6cd24e62967c011e049a24a4667a10eec600aa699bbfb5b29359bd: Status 404 returned error can't find the container with id 9d74d51c5d6cd24e62967c011e049a24a4667a10eec600aa699bbfb5b29359bd Dec 03 12:20:06 crc kubenswrapper[4591]: I1203 12:20:06.870710 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-884jm" event={"ID":"809d1a94-3e0e-487c-a47a-c4197c505bfb","Type":"ContainerStarted","Data":"9d74d51c5d6cd24e62967c011e049a24a4667a10eec600aa699bbfb5b29359bd"} Dec 03 12:20:06 crc kubenswrapper[4591]: I1203 12:20:06.873355 4591 generic.go:334] "Generic (PLEG): container finished" podID="9bf70e0c-4a12-4965-85c9-212b85622dc0" containerID="bf31e2b457cf94e48d247ff3426aaf78f2029572745fd2758bfd35a39b4ba838" exitCode=0 Dec 03 12:20:06 crc kubenswrapper[4591]: I1203 12:20:06.873468 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerDied","Data":"bf31e2b457cf94e48d247ff3426aaf78f2029572745fd2758bfd35a39b4ba838"} Dec 03 12:20:06 crc kubenswrapper[4591]: I1203 12:20:06.899444 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44da941a-421d-4d48-ad3e-fd6fc9a5733e" path="/var/lib/kubelet/pods/44da941a-421d-4d48-ad3e-fd6fc9a5733e/volumes" Dec 03 12:20:07 crc kubenswrapper[4591]: I1203 12:20:07.882847 4591 generic.go:334] "Generic (PLEG): container finished" podID="9bf70e0c-4a12-4965-85c9-212b85622dc0" containerID="854f3976e69c14a895a70e7e29ccde822dd509d98084f06e5eeff70014c90db7" exitCode=0 Dec 03 12:20:07 crc kubenswrapper[4591]: I1203 12:20:07.882919 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerDied","Data":"854f3976e69c14a895a70e7e29ccde822dd509d98084f06e5eeff70014c90db7"} Dec 03 12:20:07 crc kubenswrapper[4591]: I1203 12:20:07.884522 4591 generic.go:334] "Generic (PLEG): container finished" podID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerID="220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f" exitCode=0 Dec 03 12:20:07 crc kubenswrapper[4591]: I1203 12:20:07.884557 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-884jm" event={"ID":"809d1a94-3e0e-487c-a47a-c4197c505bfb","Type":"ContainerDied","Data":"220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.204083 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="metallb-system/controller-f8648f98b-t29t8" Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.893091 4591 generic.go:334] "Generic (PLEG): container finished" podID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerID="d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256" exitCode=0 Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.896325 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-884jm" event={"ID":"809d1a94-3e0e-487c-a47a-c4197c505bfb","Type":"ContainerDied","Data":"d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.898462 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerStarted","Data":"40d147e1425a0f306797837bd55cbe56b130e4aa857b9b14d268c2aa265791c5"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.898504 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerStarted","Data":"29de0ed31246d4dfe8e6300a81946ebd4590dafdc67cb995be7ec33f1b0455bd"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.898515 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerStarted","Data":"1028a3a8ffc29bdb52e1b968de941232fd0283a45520f10f7d42920910e15c70"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.898525 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerStarted","Data":"9ca185a00d6ac07a65232c7db49d4f5464c40477c4a0b401d74b01145cd639a6"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.898534 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerStarted","Data":"8850751faa16f62f44704de7623de5093cfa854d52fa3b42b97f0a00c1deb4ed"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.898542 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-czlhw" event={"ID":"9bf70e0c-4a12-4965-85c9-212b85622dc0","Type":"ContainerStarted","Data":"74285ac3420287e26ca296efa2c18d353b1a8c9f33985785cac039524585ea15"} Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.898634 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-czlhw" Dec 03 12:20:08 crc kubenswrapper[4591]: I1203 12:20:08.927234 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-czlhw" podStartSLOduration=5.34394787 podStartE2EDuration="11.927217641s" podCreationTimestamp="2025-12-03 12:19:57 +0000 UTC" firstStartedPulling="2025-12-03 12:19:58.294722872 +0000 UTC m=+895.721762642" lastFinishedPulling="2025-12-03 12:20:04.877992643 +0000 UTC m=+902.305032413" observedRunningTime="2025-12-03 12:20:08.924461187 +0000 UTC m=+906.351500958" watchObservedRunningTime="2025-12-03 12:20:08.927217641 +0000 UTC m=+906.354257412" Dec 03 12:20:09 crc kubenswrapper[4591]: I1203 12:20:09.696391 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-sk4bx" Dec 03 12:20:09 crc kubenswrapper[4591]: I1203 12:20:09.914464 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-884jm" event={"ID":"809d1a94-3e0e-487c-a47a-c4197c505bfb","Type":"ContainerStarted","Data":"860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f"} Dec 03 12:20:09 crc kubenswrapper[4591]: I1203 12:20:09.933497 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-884jm" podStartSLOduration=3.166042001 podStartE2EDuration="4.933479239s" podCreationTimestamp="2025-12-03 12:20:05 +0000 UTC" firstStartedPulling="2025-12-03 12:20:07.885463388 +0000 UTC m=+905.312503158" lastFinishedPulling="2025-12-03 12:20:09.652900626 +0000 UTC m=+907.079940396" observedRunningTime="2025-12-03 12:20:09.929413113 +0000 UTC m=+907.356452882" watchObservedRunningTime="2025-12-03 12:20:09.933479239 +0000 UTC m=+907.360518998" Dec 03 12:20:13 crc kubenswrapper[4591]: I1203 12:20:13.131335 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-czlhw" Dec 03 12:20:13 crc kubenswrapper[4591]: I1203 12:20:13.161158 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-czlhw" Dec 03 12:20:15 crc kubenswrapper[4591]: I1203 12:20:15.926320 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:15 crc kubenswrapper[4591]: I1203 12:20:15.926571 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:15 crc kubenswrapper[4591]: I1203 12:20:15.961595 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:15 crc kubenswrapper[4591]: I1203 12:20:15.994380 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.412346 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-v5fln"] Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.413349 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-v5fln" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.419804 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-v5fln"] Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.420396 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.420743 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-kzlh2" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.421125 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.530902 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p6rv\" (UniqueName: \"kubernetes.io/projected/9b97239d-8d0f-443b-bc87-4c56b518bab3-kube-api-access-6p6rv\") pod \"openstack-operator-index-v5fln\" (UID: \"9b97239d-8d0f-443b-bc87-4c56b518bab3\") " pod="openstack-operators/openstack-operator-index-v5fln" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.633212 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p6rv\" (UniqueName: \"kubernetes.io/projected/9b97239d-8d0f-443b-bc87-4c56b518bab3-kube-api-access-6p6rv\") pod \"openstack-operator-index-v5fln\" (UID: \"9b97239d-8d0f-443b-bc87-4c56b518bab3\") " pod="openstack-operators/openstack-operator-index-v5fln" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.650043 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p6rv\" (UniqueName: \"kubernetes.io/projected/9b97239d-8d0f-443b-bc87-4c56b518bab3-kube-api-access-6p6rv\") pod \"openstack-operator-index-v5fln\" (UID: \"9b97239d-8d0f-443b-bc87-4c56b518bab3\") " pod="openstack-operators/openstack-operator-index-v5fln" Dec 03 12:20:16 crc kubenswrapper[4591]: I1203 12:20:16.726805 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-v5fln" Dec 03 12:20:17 crc kubenswrapper[4591]: I1203 12:20:17.095846 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-v5fln"] Dec 03 12:20:17 crc kubenswrapper[4591]: I1203 12:20:17.964370 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-v5fln" event={"ID":"9b97239d-8d0f-443b-bc87-4c56b518bab3","Type":"ContainerStarted","Data":"4fac3be9728bb36cf50f657245ac9485d1bb0f7752a7b39a4b3515088e4c02c1"} Dec 03 12:20:18 crc kubenswrapper[4591]: I1203 12:20:18.135495 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-czlhw" Dec 03 12:20:18 crc kubenswrapper[4591]: I1203 12:20:18.139003 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-r82b4" Dec 03 12:20:21 crc kubenswrapper[4591]: I1203 12:20:21.814602 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ld6zp"] Dec 03 12:20:21 crc kubenswrapper[4591]: I1203 12:20:21.816167 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:21 crc kubenswrapper[4591]: I1203 12:20:21.826982 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ld6zp"] Dec 03 12:20:21 crc kubenswrapper[4591]: I1203 12:20:21.921007 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-utilities\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:21 crc kubenswrapper[4591]: I1203 12:20:21.921138 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxqbk\" (UniqueName: \"kubernetes.io/projected/fa9e2412-7ec4-42d3-b70b-815b188aec80-kube-api-access-qxqbk\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:21 crc kubenswrapper[4591]: I1203 12:20:21.921250 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-catalog-content\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.022741 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-utilities\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.022831 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxqbk\" (UniqueName: \"kubernetes.io/projected/fa9e2412-7ec4-42d3-b70b-815b188aec80-kube-api-access-qxqbk\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.022914 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-catalog-content\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.023269 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-utilities\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.023399 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-catalog-content\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.040056 4591 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qxqbk\" (UniqueName: \"kubernetes.io/projected/fa9e2412-7ec4-42d3-b70b-815b188aec80-kube-api-access-qxqbk\") pod \"community-operators-ld6zp\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.137970 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:22 crc kubenswrapper[4591]: I1203 12:20:22.565455 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ld6zp"] Dec 03 12:20:22 crc kubenswrapper[4591]: W1203 12:20:22.569057 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa9e2412_7ec4_42d3_b70b_815b188aec80.slice/crio-cbdfdabd28522b548675c1df370a34870996edeb0b27b85422bd451bb3cd64db WatchSource:0}: Error finding container cbdfdabd28522b548675c1df370a34870996edeb0b27b85422bd451bb3cd64db: Status 404 returned error can't find the container with id cbdfdabd28522b548675c1df370a34870996edeb0b27b85422bd451bb3cd64db Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.000496 4591 generic.go:334] "Generic (PLEG): container finished" podID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerID="18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c" exitCode=0 Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.000561 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ld6zp" event={"ID":"fa9e2412-7ec4-42d3-b70b-815b188aec80","Type":"ContainerDied","Data":"18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c"} Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.000795 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ld6zp" event={"ID":"fa9e2412-7ec4-42d3-b70b-815b188aec80","Type":"ContainerStarted","Data":"cbdfdabd28522b548675c1df370a34870996edeb0b27b85422bd451bb3cd64db"} Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.007884 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-884jm"] Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.008153 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-884jm" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="registry-server" containerID="cri-o://860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f" gracePeriod=2 Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.380551 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.468996 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-catalog-content\") pod \"809d1a94-3e0e-487c-a47a-c4197c505bfb\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.469209 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb67s\" (UniqueName: \"kubernetes.io/projected/809d1a94-3e0e-487c-a47a-c4197c505bfb-kube-api-access-nb67s\") pod \"809d1a94-3e0e-487c-a47a-c4197c505bfb\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.469283 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-utilities\") pod \"809d1a94-3e0e-487c-a47a-c4197c505bfb\" (UID: \"809d1a94-3e0e-487c-a47a-c4197c505bfb\") " Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.470162 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-utilities" (OuterVolumeSpecName: "utilities") pod "809d1a94-3e0e-487c-a47a-c4197c505bfb" (UID: "809d1a94-3e0e-487c-a47a-c4197c505bfb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.475477 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/809d1a94-3e0e-487c-a47a-c4197c505bfb-kube-api-access-nb67s" (OuterVolumeSpecName: "kube-api-access-nb67s") pod "809d1a94-3e0e-487c-a47a-c4197c505bfb" (UID: "809d1a94-3e0e-487c-a47a-c4197c505bfb"). InnerVolumeSpecName "kube-api-access-nb67s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.483468 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "809d1a94-3e0e-487c-a47a-c4197c505bfb" (UID: "809d1a94-3e0e-487c-a47a-c4197c505bfb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.570607 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.570712 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/809d1a94-3e0e-487c-a47a-c4197c505bfb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:23 crc kubenswrapper[4591]: I1203 12:20:23.570786 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb67s\" (UniqueName: \"kubernetes.io/projected/809d1a94-3e0e-487c-a47a-c4197c505bfb-kube-api-access-nb67s\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.015905 4591 generic.go:334] "Generic (PLEG): container finished" podID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerID="0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424" exitCode=0 Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.016108 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ld6zp" event={"ID":"fa9e2412-7ec4-42d3-b70b-815b188aec80","Type":"ContainerDied","Data":"0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424"} Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.026993 4591 generic.go:334] "Generic (PLEG): container finished" podID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerID="860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f" exitCode=0 Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.027029 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-884jm" event={"ID":"809d1a94-3e0e-487c-a47a-c4197c505bfb","Type":"ContainerDied","Data":"860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f"} Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.027088 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-884jm" event={"ID":"809d1a94-3e0e-487c-a47a-c4197c505bfb","Type":"ContainerDied","Data":"9d74d51c5d6cd24e62967c011e049a24a4667a10eec600aa699bbfb5b29359bd"} Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.027098 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-884jm" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.027109 4591 scope.go:117] "RemoveContainer" containerID="860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.045369 4591 scope.go:117] "RemoveContainer" containerID="d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.053673 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-884jm"] Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.058660 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-884jm"] Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.074797 4591 scope.go:117] "RemoveContainer" containerID="220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.089315 4591 scope.go:117] "RemoveContainer" containerID="860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f" Dec 03 12:20:24 crc kubenswrapper[4591]: E1203 12:20:24.089688 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f\": container with ID starting with 860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f not found: ID does not exist" containerID="860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.089775 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f"} err="failed to get container status \"860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f\": rpc error: code = NotFound desc = could not find container \"860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f\": container with ID starting with 860c2870cb9b9825ca8d79cb8e2cdb3c70689d1e7e8ccb6528f7e77560b4094f not found: ID does not exist" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.089851 4591 scope.go:117] "RemoveContainer" containerID="d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256" Dec 03 12:20:24 crc kubenswrapper[4591]: E1203 12:20:24.090212 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256\": container with ID starting with d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256 not found: ID does not exist" containerID="d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.090239 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256"} err="failed to get container status \"d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256\": rpc error: code = NotFound desc = could not find container \"d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256\": container with ID starting with d5fef239e456f59d64517d4413d170cca98d61b47e3071144820bdd78b834256 not found: ID does not exist" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.090266 4591 scope.go:117] "RemoveContainer" 
containerID="220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f" Dec 03 12:20:24 crc kubenswrapper[4591]: E1203 12:20:24.090493 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f\": container with ID starting with 220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f not found: ID does not exist" containerID="220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.090560 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f"} err="failed to get container status \"220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f\": rpc error: code = NotFound desc = could not find container \"220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f\": container with ID starting with 220ca0b3040d90b5811d5859c3dbb0fdd9d446bcb0776c0cb7a93b769a941c7f not found: ID does not exist" Dec 03 12:20:24 crc kubenswrapper[4591]: I1203 12:20:24.896695 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" path="/var/lib/kubelet/pods/809d1a94-3e0e-487c-a47a-c4197c505bfb/volumes" Dec 03 12:20:25 crc kubenswrapper[4591]: I1203 12:20:25.034686 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ld6zp" event={"ID":"fa9e2412-7ec4-42d3-b70b-815b188aec80","Type":"ContainerStarted","Data":"26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac"} Dec 03 12:20:25 crc kubenswrapper[4591]: I1203 12:20:25.049737 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ld6zp" podStartSLOduration=2.542946128 podStartE2EDuration="4.049722769s" podCreationTimestamp="2025-12-03 12:20:21 +0000 UTC" firstStartedPulling="2025-12-03 12:20:23.002109526 +0000 UTC m=+920.429149296" lastFinishedPulling="2025-12-03 12:20:24.508886167 +0000 UTC m=+921.935925937" observedRunningTime="2025-12-03 12:20:25.04835693 +0000 UTC m=+922.475396700" watchObservedRunningTime="2025-12-03 12:20:25.049722769 +0000 UTC m=+922.476762539" Dec 03 12:20:25 crc kubenswrapper[4591]: I1203 12:20:25.300231 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:20:25 crc kubenswrapper[4591]: I1203 12:20:25.300300 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:20:25 crc kubenswrapper[4591]: I1203 12:20:25.300343 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:20:25 crc kubenswrapper[4591]: I1203 12:20:25.300765 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"5d4572b6f024b0619a91840ddb04380962109d71dc4c14f5c0eae7f89d47c431"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:20:25 crc kubenswrapper[4591]: I1203 12:20:25.300819 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://5d4572b6f024b0619a91840ddb04380962109d71dc4c14f5c0eae7f89d47c431" gracePeriod=600 Dec 03 12:20:26 crc kubenswrapper[4591]: I1203 12:20:26.044033 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="5d4572b6f024b0619a91840ddb04380962109d71dc4c14f5c0eae7f89d47c431" exitCode=0 Dec 03 12:20:26 crc kubenswrapper[4591]: I1203 12:20:26.044094 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"5d4572b6f024b0619a91840ddb04380962109d71dc4c14f5c0eae7f89d47c431"} Dec 03 12:20:26 crc kubenswrapper[4591]: I1203 12:20:26.044658 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"489b9d9b9bc7af5c2a8844b4f16e7376a5f918b18e3ce09aadd022b7d844e39c"} Dec 03 12:20:26 crc kubenswrapper[4591]: I1203 12:20:26.044679 4591 scope.go:117] "RemoveContainer" containerID="d3f2548089882317f55d570c4b96ca8c02b125668122d2d842c90e0bdabc40c7" Dec 03 12:20:32 crc kubenswrapper[4591]: I1203 12:20:32.138820 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:32 crc kubenswrapper[4591]: I1203 12:20:32.139528 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:32 crc kubenswrapper[4591]: I1203 12:20:32.176174 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:33 crc kubenswrapper[4591]: I1203 12:20:33.131113 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:34 crc kubenswrapper[4591]: I1203 12:20:34.006861 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ld6zp"] Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.108377 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ld6zp" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="registry-server" containerID="cri-o://26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac" gracePeriod=2 Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.472683 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.670209 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxqbk\" (UniqueName: \"kubernetes.io/projected/fa9e2412-7ec4-42d3-b70b-815b188aec80-kube-api-access-qxqbk\") pod \"fa9e2412-7ec4-42d3-b70b-815b188aec80\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.670274 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-utilities\") pod \"fa9e2412-7ec4-42d3-b70b-815b188aec80\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.670326 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-catalog-content\") pod \"fa9e2412-7ec4-42d3-b70b-815b188aec80\" (UID: \"fa9e2412-7ec4-42d3-b70b-815b188aec80\") " Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.671105 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-utilities" (OuterVolumeSpecName: "utilities") pod "fa9e2412-7ec4-42d3-b70b-815b188aec80" (UID: "fa9e2412-7ec4-42d3-b70b-815b188aec80"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.677204 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa9e2412-7ec4-42d3-b70b-815b188aec80-kube-api-access-qxqbk" (OuterVolumeSpecName: "kube-api-access-qxqbk") pod "fa9e2412-7ec4-42d3-b70b-815b188aec80" (UID: "fa9e2412-7ec4-42d3-b70b-815b188aec80"). InnerVolumeSpecName "kube-api-access-qxqbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.722525 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa9e2412-7ec4-42d3-b70b-815b188aec80" (UID: "fa9e2412-7ec4-42d3-b70b-815b188aec80"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.772230 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxqbk\" (UniqueName: \"kubernetes.io/projected/fa9e2412-7ec4-42d3-b70b-815b188aec80-kube-api-access-qxqbk\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.772307 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:35 crc kubenswrapper[4591]: I1203 12:20:35.772318 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa9e2412-7ec4-42d3-b70b-815b188aec80-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.120036 4591 generic.go:334] "Generic (PLEG): container finished" podID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerID="26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac" exitCode=0 Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.120110 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ld6zp" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.120129 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ld6zp" event={"ID":"fa9e2412-7ec4-42d3-b70b-815b188aec80","Type":"ContainerDied","Data":"26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac"} Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.120187 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ld6zp" event={"ID":"fa9e2412-7ec4-42d3-b70b-815b188aec80","Type":"ContainerDied","Data":"cbdfdabd28522b548675c1df370a34870996edeb0b27b85422bd451bb3cd64db"} Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.120211 4591 scope.go:117] "RemoveContainer" containerID="26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.141521 4591 scope.go:117] "RemoveContainer" containerID="0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.149372 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ld6zp"] Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.153787 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ld6zp"] Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.176089 4591 scope.go:117] "RemoveContainer" containerID="18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.188566 4591 scope.go:117] "RemoveContainer" containerID="26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac" Dec 03 12:20:36 crc kubenswrapper[4591]: E1203 12:20:36.188864 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac\": container with ID starting with 26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac not found: ID does not exist" containerID="26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.188896 
4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac"} err="failed to get container status \"26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac\": rpc error: code = NotFound desc = could not find container \"26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac\": container with ID starting with 26c1fef544b9007380f74ef6d89917e52d9e46b714a51313628988acfb47d9ac not found: ID does not exist" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.188915 4591 scope.go:117] "RemoveContainer" containerID="0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424" Dec 03 12:20:36 crc kubenswrapper[4591]: E1203 12:20:36.189278 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424\": container with ID starting with 0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424 not found: ID does not exist" containerID="0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.189302 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424"} err="failed to get container status \"0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424\": rpc error: code = NotFound desc = could not find container \"0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424\": container with ID starting with 0486f09363ce5fd26dd4e67c592ef54cb7d99c756d6aa4184a752f446780f424 not found: ID does not exist" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.189316 4591 scope.go:117] "RemoveContainer" containerID="18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c" Dec 03 12:20:36 crc kubenswrapper[4591]: E1203 12:20:36.189655 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c\": container with ID starting with 18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c not found: ID does not exist" containerID="18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.189709 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c"} err="failed to get container status \"18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c\": rpc error: code = NotFound desc = could not find container \"18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c\": container with ID starting with 18aa865bdfc77926726011907444b361455144357010eca73bda3812f7b1453c not found: ID does not exist" Dec 03 12:20:36 crc kubenswrapper[4591]: I1203 12:20:36.899558 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" path="/var/lib/kubelet/pods/fa9e2412-7ec4-42d3-b70b-815b188aec80/volumes" Dec 03 12:22:17 crc kubenswrapper[4591]: E1203 12:22:17.105456 4591 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source 
docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:22:17 crc kubenswrapper[4591]: E1203 12:22:17.105905 4591 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:22:17 crc kubenswrapper[4591]: E1203 12:22:17.106030 4591 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6p6rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-v5fln_openstack-operators(9b97239d-8d0f-443b-bc87-4c56b518bab3): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" 
logger="UnhandledError" Dec 03 12:22:17 crc kubenswrapper[4591]: E1203 12:22:17.107233 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \\\"http://38.102.83.13:5001/v2/\\\": dial tcp 38.102.83.13:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:22:17 crc kubenswrapper[4591]: E1203 12:22:17.733467 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:22:25 crc kubenswrapper[4591]: I1203 12:22:25.299715 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:22:25 crc kubenswrapper[4591]: I1203 12:22:25.300201 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:22:55 crc kubenswrapper[4591]: I1203 12:22:55.300085 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:22:55 crc kubenswrapper[4591]: I1203 12:22:55.300442 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:23:25 crc kubenswrapper[4591]: I1203 12:23:25.300157 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:23:25 crc kubenswrapper[4591]: I1203 12:23:25.300679 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:23:25 crc kubenswrapper[4591]: I1203 12:23:25.300730 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:23:25 crc 
kubenswrapper[4591]: I1203 12:23:25.301330 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"489b9d9b9bc7af5c2a8844b4f16e7376a5f918b18e3ce09aadd022b7d844e39c"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:23:25 crc kubenswrapper[4591]: I1203 12:23:25.301391 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://489b9d9b9bc7af5c2a8844b4f16e7376a5f918b18e3ce09aadd022b7d844e39c" gracePeriod=600 Dec 03 12:23:26 crc kubenswrapper[4591]: I1203 12:23:26.111225 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="489b9d9b9bc7af5c2a8844b4f16e7376a5f918b18e3ce09aadd022b7d844e39c" exitCode=0 Dec 03 12:23:26 crc kubenswrapper[4591]: I1203 12:23:26.111302 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"489b9d9b9bc7af5c2a8844b4f16e7376a5f918b18e3ce09aadd022b7d844e39c"} Dec 03 12:23:26 crc kubenswrapper[4591]: I1203 12:23:26.111711 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"1777f2a4a6ff6489f893ac201f94635af72fa785daaa53dd7a83e9c59bd91fd3"} Dec 03 12:23:26 crc kubenswrapper[4591]: I1203 12:23:26.111736 4591 scope.go:117] "RemoveContainer" containerID="5d4572b6f024b0619a91840ddb04380962109d71dc4c14f5c0eae7f89d47c431" Dec 03 12:24:31 crc kubenswrapper[4591]: E1203 12:24:31.897687 4591 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:24:31 crc kubenswrapper[4591]: E1203 12:24:31.898891 4591 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:24:31 crc kubenswrapper[4591]: E1203 12:24:31.899144 4591 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 
50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6p6rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-v5fln_openstack-operators(9b97239d-8d0f-443b-bc87-4c56b518bab3): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" logger="UnhandledError" Dec 03 12:24:31 crc kubenswrapper[4591]: E1203 12:24:31.900401 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \\\"http://38.102.83.13:5001/v2/\\\": dial tcp 38.102.83.13:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:24:44 crc kubenswrapper[4591]: E1203 12:24:44.892480 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:24:57 crc kubenswrapper[4591]: I1203 12:24:57.893022 4591 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:25:25 crc kubenswrapper[4591]: I1203 12:25:25.299810 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:25:25 crc kubenswrapper[4591]: I1203 12:25:25.300240 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:25:55 crc kubenswrapper[4591]: I1203 12:25:55.299817 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:25:55 crc kubenswrapper[4591]: I1203 12:25:55.300534 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:26:25 crc kubenswrapper[4591]: I1203 12:26:25.299136 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:26:25 crc kubenswrapper[4591]: I1203 12:26:25.299562 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:26:25 crc kubenswrapper[4591]: I1203 12:26:25.299603 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:26:25 crc kubenswrapper[4591]: I1203 12:26:25.299995 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1777f2a4a6ff6489f893ac201f94635af72fa785daaa53dd7a83e9c59bd91fd3"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:26:25 crc kubenswrapper[4591]: I1203 12:26:25.300039 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://1777f2a4a6ff6489f893ac201f94635af72fa785daaa53dd7a83e9c59bd91fd3" gracePeriod=600 Dec 03 12:26:26 crc kubenswrapper[4591]: I1203 12:26:26.152992 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="1777f2a4a6ff6489f893ac201f94635af72fa785daaa53dd7a83e9c59bd91fd3" exitCode=0 Dec 03 12:26:26 crc kubenswrapper[4591]: I1203 12:26:26.153085 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" 
event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"1777f2a4a6ff6489f893ac201f94635af72fa785daaa53dd7a83e9c59bd91fd3"} Dec 03 12:26:26 crc kubenswrapper[4591]: I1203 12:26:26.153406 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475"} Dec 03 12:26:26 crc kubenswrapper[4591]: I1203 12:26:26.153428 4591 scope.go:117] "RemoveContainer" containerID="489b9d9b9bc7af5c2a8844b4f16e7376a5f918b18e3ce09aadd022b7d844e39c" Dec 03 12:26:57 crc kubenswrapper[4591]: E1203 12:26:57.897531 4591 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:26:57 crc kubenswrapper[4591]: E1203 12:26:57.897988 4591 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:26:57 crc kubenswrapper[4591]: E1203 12:26:57.898281 4591 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6p6rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-v5fln_openstack-operators(9b97239d-8d0f-443b-bc87-4c56b518bab3): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" logger="UnhandledError" Dec 03 12:26:57 crc kubenswrapper[4591]: E1203 12:26:57.899703 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \\\"http://38.102.83.13:5001/v2/\\\": dial tcp 38.102.83.13:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:27:08 crc kubenswrapper[4591]: E1203 12:27:08.893872 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:27:22 crc kubenswrapper[4591]: E1203 12:27:22.894684 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:27:37 crc kubenswrapper[4591]: E1203 12:27:37.892190 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:28:25 crc kubenswrapper[4591]: I1203 12:28:25.299850 4591 patch_prober.go:28] interesting 
pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:28:25 crc kubenswrapper[4591]: I1203 12:28:25.300322 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:28:55 crc kubenswrapper[4591]: I1203 12:28:55.300095 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:28:55 crc kubenswrapper[4591]: I1203 12:28:55.300554 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:29:25 crc kubenswrapper[4591]: I1203 12:29:25.299854 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:29:25 crc kubenswrapper[4591]: I1203 12:29:25.300273 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:29:25 crc kubenswrapper[4591]: I1203 12:29:25.300312 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:29:25 crc kubenswrapper[4591]: I1203 12:29:25.300724 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:29:25 crc kubenswrapper[4591]: I1203 12:29:25.300773 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" gracePeriod=600 Dec 03 12:29:25 crc kubenswrapper[4591]: E1203 12:29:25.415648 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:29:26 crc kubenswrapper[4591]: I1203 12:29:26.146654 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" exitCode=0 Dec 03 12:29:26 crc kubenswrapper[4591]: I1203 12:29:26.146720 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475"} Dec 03 12:29:26 crc kubenswrapper[4591]: I1203 12:29:26.146785 4591 scope.go:117] "RemoveContainer" containerID="1777f2a4a6ff6489f893ac201f94635af72fa785daaa53dd7a83e9c59bd91fd3" Dec 03 12:29:26 crc kubenswrapper[4591]: I1203 12:29:26.147251 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:29:26 crc kubenswrapper[4591]: E1203 12:29:26.147538 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:29:40 crc kubenswrapper[4591]: I1203 12:29:40.890603 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:29:40 crc kubenswrapper[4591]: E1203 12:29:40.891216 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:29:49 crc kubenswrapper[4591]: E1203 12:29:49.896765 4591 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:29:49 crc kubenswrapper[4591]: E1203 12:29:49.897234 4591 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:29:49 crc kubenswrapper[4591]: E1203 12:29:49.897347 4591 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:registry-server,Image:38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6p6rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-v5fln_openstack-operators(9b97239d-8d0f-443b-bc87-4c56b518bab3): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" logger="UnhandledError" Dec 03 12:29:49 crc kubenswrapper[4591]: E1203 12:29:49.898477 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \\\"http://38.102.83.13:5001/v2/\\\": dial tcp 38.102.83.13:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:29:54 crc kubenswrapper[4591]: I1203 12:29:54.890975 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:29:54 crc kubenswrapper[4591]: E1203 12:29:54.891737 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.130418 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb"] Dec 03 12:30:00 crc kubenswrapper[4591]: E1203 12:30:00.131379 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="extract-content" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131394 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="extract-content" Dec 03 12:30:00 crc kubenswrapper[4591]: E1203 12:30:00.131451 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="registry-server" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131457 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="registry-server" Dec 03 12:30:00 crc kubenswrapper[4591]: E1203 12:30:00.131464 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="extract-utilities" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131471 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="extract-utilities" Dec 03 12:30:00 crc kubenswrapper[4591]: E1203 12:30:00.131486 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="extract-content" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131491 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="extract-content" Dec 03 12:30:00 crc kubenswrapper[4591]: E1203 12:30:00.131502 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="registry-server" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131509 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="registry-server" Dec 03 12:30:00 crc kubenswrapper[4591]: E1203 12:30:00.131520 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="extract-utilities" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131527 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="extract-utilities" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131824 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="809d1a94-3e0e-487c-a47a-c4197c505bfb" containerName="registry-server" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.131851 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa9e2412-7ec4-42d3-b70b-815b188aec80" containerName="registry-server" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.132562 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.138751 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.139246 4591 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.148289 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb"] Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.202089 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4g5c\" (UniqueName: \"kubernetes.io/projected/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-kube-api-access-r4g5c\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.202130 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-secret-volume\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.202294 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-config-volume\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.303307 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-config-volume\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.303365 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4g5c\" (UniqueName: \"kubernetes.io/projected/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-kube-api-access-r4g5c\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.303385 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-secret-volume\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.304268 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-config-volume\") pod 
\"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.308902 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-secret-volume\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.316342 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4g5c\" (UniqueName: \"kubernetes.io/projected/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-kube-api-access-r4g5c\") pod \"collect-profiles-29412750-kwtfb\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.449531 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:00 crc kubenswrapper[4591]: I1203 12:30:00.803313 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb"] Dec 03 12:30:01 crc kubenswrapper[4591]: I1203 12:30:01.333441 4591 generic.go:334] "Generic (PLEG): container finished" podID="17bfacfe-10d8-45f6-8b2a-071cbcc6a173" containerID="415c90fd3d451e5d4e0a7414aed1b4587d27869b1d80dcc2068772ec5d79dfcf" exitCode=0 Dec 03 12:30:01 crc kubenswrapper[4591]: I1203 12:30:01.333496 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" event={"ID":"17bfacfe-10d8-45f6-8b2a-071cbcc6a173","Type":"ContainerDied","Data":"415c90fd3d451e5d4e0a7414aed1b4587d27869b1d80dcc2068772ec5d79dfcf"} Dec 03 12:30:01 crc kubenswrapper[4591]: I1203 12:30:01.333688 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" event={"ID":"17bfacfe-10d8-45f6-8b2a-071cbcc6a173","Type":"ContainerStarted","Data":"1dfcbe7a3a3f56e79029ae1b8fc4d5e2f6c88d7fa2fb8b5e468079db20c9e05f"} Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.564508 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.735441 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4g5c\" (UniqueName: \"kubernetes.io/projected/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-kube-api-access-r4g5c\") pod \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.735798 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-config-volume\") pod \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.735934 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-secret-volume\") pod \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\" (UID: \"17bfacfe-10d8-45f6-8b2a-071cbcc6a173\") " Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.736554 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-config-volume" (OuterVolumeSpecName: "config-volume") pod "17bfacfe-10d8-45f6-8b2a-071cbcc6a173" (UID: "17bfacfe-10d8-45f6-8b2a-071cbcc6a173"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.740341 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "17bfacfe-10d8-45f6-8b2a-071cbcc6a173" (UID: "17bfacfe-10d8-45f6-8b2a-071cbcc6a173"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.740451 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-kube-api-access-r4g5c" (OuterVolumeSpecName: "kube-api-access-r4g5c") pod "17bfacfe-10d8-45f6-8b2a-071cbcc6a173" (UID: "17bfacfe-10d8-45f6-8b2a-071cbcc6a173"). InnerVolumeSpecName "kube-api-access-r4g5c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.837825 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4g5c\" (UniqueName: \"kubernetes.io/projected/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-kube-api-access-r4g5c\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.837860 4591 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:02 crc kubenswrapper[4591]: I1203 12:30:02.837871 4591 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/17bfacfe-10d8-45f6-8b2a-071cbcc6a173-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:03 crc kubenswrapper[4591]: I1203 12:30:03.347956 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" event={"ID":"17bfacfe-10d8-45f6-8b2a-071cbcc6a173","Type":"ContainerDied","Data":"1dfcbe7a3a3f56e79029ae1b8fc4d5e2f6c88d7fa2fb8b5e468079db20c9e05f"} Dec 03 12:30:03 crc kubenswrapper[4591]: I1203 12:30:03.348262 4591 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1dfcbe7a3a3f56e79029ae1b8fc4d5e2f6c88d7fa2fb8b5e468079db20c9e05f" Dec 03 12:30:03 crc kubenswrapper[4591]: I1203 12:30:03.348034 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-kwtfb" Dec 03 12:30:04 crc kubenswrapper[4591]: E1203 12:30:04.891882 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:30:08 crc kubenswrapper[4591]: I1203 12:30:08.890449 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:30:08 crc kubenswrapper[4591]: E1203 12:30:08.890905 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:30:17 crc kubenswrapper[4591]: E1203 12:30:17.892282 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:30:21 crc kubenswrapper[4591]: I1203 12:30:21.890779 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:30:21 crc kubenswrapper[4591]: E1203 12:30:21.891551 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:30:29 crc kubenswrapper[4591]: E1203 12:30:29.892857 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:30:36 crc kubenswrapper[4591]: I1203 12:30:36.890652 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:30:36 crc kubenswrapper[4591]: E1203 12:30:36.891230 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:30:43 crc kubenswrapper[4591]: E1203 12:30:43.892680 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:30:47 crc kubenswrapper[4591]: I1203 12:30:47.891714 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:30:47 crc kubenswrapper[4591]: E1203 12:30:47.892689 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.837285 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4bttb"] Dec 03 12:30:51 crc kubenswrapper[4591]: E1203 12:30:51.837928 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17bfacfe-10d8-45f6-8b2a-071cbcc6a173" containerName="collect-profiles" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.837941 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="17bfacfe-10d8-45f6-8b2a-071cbcc6a173" containerName="collect-profiles" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.838104 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="17bfacfe-10d8-45f6-8b2a-071cbcc6a173" containerName="collect-profiles" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.839640 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.855752 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-catalog-content\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.855969 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sljvb\" (UniqueName: \"kubernetes.io/projected/e427c75d-bc99-4d64-94a6-df697e914784-kube-api-access-sljvb\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.856084 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-utilities\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.859987 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bttb"] Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.957619 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-catalog-content\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.957655 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sljvb\" (UniqueName: \"kubernetes.io/projected/e427c75d-bc99-4d64-94a6-df697e914784-kube-api-access-sljvb\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.957699 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-utilities\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.958329 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-catalog-content\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.958543 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-utilities\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:51 crc kubenswrapper[4591]: I1203 12:30:51.973745 4591 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sljvb\" (UniqueName: \"kubernetes.io/projected/e427c75d-bc99-4d64-94a6-df697e914784-kube-api-access-sljvb\") pod \"certified-operators-4bttb\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:52 crc kubenswrapper[4591]: I1203 12:30:52.160875 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:30:52 crc kubenswrapper[4591]: I1203 12:30:52.580615 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bttb"] Dec 03 12:30:52 crc kubenswrapper[4591]: I1203 12:30:52.626208 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bttb" event={"ID":"e427c75d-bc99-4d64-94a6-df697e914784","Type":"ContainerStarted","Data":"04985b340f1a3e79b19493f3c59058a52e201b1a580370c04b9efe3cbc1bd9cb"} Dec 03 12:30:53 crc kubenswrapper[4591]: I1203 12:30:53.633727 4591 generic.go:334] "Generic (PLEG): container finished" podID="e427c75d-bc99-4d64-94a6-df697e914784" containerID="ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b" exitCode=0 Dec 03 12:30:53 crc kubenswrapper[4591]: I1203 12:30:53.633804 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bttb" event={"ID":"e427c75d-bc99-4d64-94a6-df697e914784","Type":"ContainerDied","Data":"ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b"} Dec 03 12:30:53 crc kubenswrapper[4591]: I1203 12:30:53.636420 4591 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:30:54 crc kubenswrapper[4591]: I1203 12:30:54.647378 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bttb" event={"ID":"e427c75d-bc99-4d64-94a6-df697e914784","Type":"ContainerStarted","Data":"445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7"} Dec 03 12:30:55 crc kubenswrapper[4591]: I1203 12:30:55.654551 4591 generic.go:334] "Generic (PLEG): container finished" podID="e427c75d-bc99-4d64-94a6-df697e914784" containerID="445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7" exitCode=0 Dec 03 12:30:55 crc kubenswrapper[4591]: I1203 12:30:55.654598 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bttb" event={"ID":"e427c75d-bc99-4d64-94a6-df697e914784","Type":"ContainerDied","Data":"445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7"} Dec 03 12:30:56 crc kubenswrapper[4591]: I1203 12:30:56.662242 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bttb" event={"ID":"e427c75d-bc99-4d64-94a6-df697e914784","Type":"ContainerStarted","Data":"a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4"} Dec 03 12:30:56 crc kubenswrapper[4591]: I1203 12:30:56.676618 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4bttb" podStartSLOduration=3.150245656 podStartE2EDuration="5.67660079s" podCreationTimestamp="2025-12-03 12:30:51 +0000 UTC" firstStartedPulling="2025-12-03 12:30:53.636185816 +0000 UTC m=+1551.063225585" lastFinishedPulling="2025-12-03 12:30:56.162540949 +0000 UTC m=+1553.589580719" observedRunningTime="2025-12-03 12:30:56.672835921 +0000 UTC m=+1554.099875691" watchObservedRunningTime="2025-12-03 
12:30:56.67660079 +0000 UTC m=+1554.103640560" Dec 03 12:30:56 crc kubenswrapper[4591]: E1203 12:30:56.891717 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:31:02 crc kubenswrapper[4591]: I1203 12:31:02.161504 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:31:02 crc kubenswrapper[4591]: I1203 12:31:02.161765 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:31:02 crc kubenswrapper[4591]: I1203 12:31:02.192568 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:31:02 crc kubenswrapper[4591]: I1203 12:31:02.730404 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:31:02 crc kubenswrapper[4591]: I1203 12:31:02.766930 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bttb"] Dec 03 12:31:02 crc kubenswrapper[4591]: I1203 12:31:02.901771 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:31:02 crc kubenswrapper[4591]: E1203 12:31:02.902322 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:31:04 crc kubenswrapper[4591]: I1203 12:31:04.710877 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4bttb" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="registry-server" containerID="cri-o://a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4" gracePeriod=2 Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.061258 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.256591 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-utilities\") pod \"e427c75d-bc99-4d64-94a6-df697e914784\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.256633 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-catalog-content\") pod \"e427c75d-bc99-4d64-94a6-df697e914784\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.256755 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sljvb\" (UniqueName: \"kubernetes.io/projected/e427c75d-bc99-4d64-94a6-df697e914784-kube-api-access-sljvb\") pod \"e427c75d-bc99-4d64-94a6-df697e914784\" (UID: \"e427c75d-bc99-4d64-94a6-df697e914784\") " Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.257285 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-utilities" (OuterVolumeSpecName: "utilities") pod "e427c75d-bc99-4d64-94a6-df697e914784" (UID: "e427c75d-bc99-4d64-94a6-df697e914784"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.257948 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.268620 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e427c75d-bc99-4d64-94a6-df697e914784-kube-api-access-sljvb" (OuterVolumeSpecName: "kube-api-access-sljvb") pod "e427c75d-bc99-4d64-94a6-df697e914784" (UID: "e427c75d-bc99-4d64-94a6-df697e914784"). InnerVolumeSpecName "kube-api-access-sljvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.359436 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sljvb\" (UniqueName: \"kubernetes.io/projected/e427c75d-bc99-4d64-94a6-df697e914784-kube-api-access-sljvb\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.445734 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e427c75d-bc99-4d64-94a6-df697e914784" (UID: "e427c75d-bc99-4d64-94a6-df697e914784"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.461202 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c75d-bc99-4d64-94a6-df697e914784-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.718872 4591 generic.go:334] "Generic (PLEG): container finished" podID="e427c75d-bc99-4d64-94a6-df697e914784" containerID="a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4" exitCode=0 Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.718926 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bttb" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.718942 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bttb" event={"ID":"e427c75d-bc99-4d64-94a6-df697e914784","Type":"ContainerDied","Data":"a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4"} Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.718981 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bttb" event={"ID":"e427c75d-bc99-4d64-94a6-df697e914784","Type":"ContainerDied","Data":"04985b340f1a3e79b19493f3c59058a52e201b1a580370c04b9efe3cbc1bd9cb"} Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.719022 4591 scope.go:117] "RemoveContainer" containerID="a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.735770 4591 scope.go:117] "RemoveContainer" containerID="445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.744642 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bttb"] Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.750790 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4bttb"] Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.768878 4591 scope.go:117] "RemoveContainer" containerID="ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.781892 4591 scope.go:117] "RemoveContainer" containerID="a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4" Dec 03 12:31:05 crc kubenswrapper[4591]: E1203 12:31:05.782211 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4\": container with ID starting with a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4 not found: ID does not exist" containerID="a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.782247 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4"} err="failed to get container status \"a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4\": rpc error: code = NotFound desc = could not find container \"a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4\": container with ID starting with a25d24652275522c6b7742d6bb1ced51a6b8eb16753aed2999ce5f5b061776b4 not found: ID does not exist" Dec 03 
12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.782294 4591 scope.go:117] "RemoveContainer" containerID="445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7" Dec 03 12:31:05 crc kubenswrapper[4591]: E1203 12:31:05.782601 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7\": container with ID starting with 445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7 not found: ID does not exist" containerID="445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.782645 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7"} err="failed to get container status \"445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7\": rpc error: code = NotFound desc = could not find container \"445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7\": container with ID starting with 445fa54276332621176a223f650416d27e4fee3482429d8bc0211fe7cfdd6dd7 not found: ID does not exist" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.782659 4591 scope.go:117] "RemoveContainer" containerID="ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b" Dec 03 12:31:05 crc kubenswrapper[4591]: E1203 12:31:05.782885 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b\": container with ID starting with ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b not found: ID does not exist" containerID="ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b" Dec 03 12:31:05 crc kubenswrapper[4591]: I1203 12:31:05.782913 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b"} err="failed to get container status \"ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b\": rpc error: code = NotFound desc = could not find container \"ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b\": container with ID starting with ed470feec1ddb8eac5ee5f99b32e79609cd88f4499ad29e74680ea8c9552fa5b not found: ID does not exist" Dec 03 12:31:06 crc kubenswrapper[4591]: I1203 12:31:06.896908 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e427c75d-bc99-4d64-94a6-df697e914784" path="/var/lib/kubelet/pods/e427c75d-bc99-4d64-94a6-df697e914784/volumes" Dec 03 12:31:09 crc kubenswrapper[4591]: E1203 12:31:09.891280 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:31:13 crc kubenswrapper[4591]: I1203 12:31:13.890837 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:31:13 crc kubenswrapper[4591]: E1203 12:31:13.891477 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:31:26 crc kubenswrapper[4591]: I1203 12:31:26.890415 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:31:26 crc kubenswrapper[4591]: E1203 12:31:26.891165 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:31:39 crc kubenswrapper[4591]: I1203 12:31:39.890352 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:31:39 crc kubenswrapper[4591]: E1203 12:31:39.891052 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:31:52 crc kubenswrapper[4591]: I1203 12:31:52.894460 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:31:52 crc kubenswrapper[4591]: E1203 12:31:52.895166 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:32:03 crc kubenswrapper[4591]: I1203 12:32:03.890114 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:32:03 crc kubenswrapper[4591]: E1203 12:32:03.890712 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:32:14 crc kubenswrapper[4591]: I1203 12:32:14.890703 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:32:14 crc kubenswrapper[4591]: E1203 12:32:14.891282 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:32:29 crc kubenswrapper[4591]: I1203 12:32:29.890360 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:32:29 crc kubenswrapper[4591]: E1203 12:32:29.891074 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:32:41 crc kubenswrapper[4591]: I1203 12:32:41.890562 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:32:41 crc kubenswrapper[4591]: E1203 12:32:41.891224 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:32:54 crc kubenswrapper[4591]: I1203 12:32:54.891096 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:32:54 crc kubenswrapper[4591]: E1203 12:32:54.891592 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:33:09 crc kubenswrapper[4591]: I1203 12:33:09.890710 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:33:09 crc kubenswrapper[4591]: E1203 12:33:09.891353 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:33:21 crc kubenswrapper[4591]: I1203 12:33:21.890733 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:33:21 crc kubenswrapper[4591]: E1203 12:33:21.891474 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:33:23 crc kubenswrapper[4591]: E1203 12:33:23.898882 4591 log.go:32] 
"PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:33:23 crc kubenswrapper[4591]: E1203 12:33:23.899101 4591 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:33:23 crc kubenswrapper[4591]: E1203 12:33:23.899206 4591 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6p6rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-v5fln_openstack-operators(9b97239d-8d0f-443b-bc87-4c56b518bab3): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 
38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" logger="UnhandledError" Dec 03 12:33:23 crc kubenswrapper[4591]: E1203 12:33:23.900370 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \\\"http://38.102.83.13:5001/v2/\\\": dial tcp 38.102.83.13:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:33:35 crc kubenswrapper[4591]: I1203 12:33:35.890909 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:33:35 crc kubenswrapper[4591]: E1203 12:33:35.891475 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:33:37 crc kubenswrapper[4591]: E1203 12:33:37.891355 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:33:46 crc kubenswrapper[4591]: I1203 12:33:46.890632 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:33:46 crc kubenswrapper[4591]: E1203 12:33:46.891110 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:33:50 crc kubenswrapper[4591]: E1203 12:33:50.892419 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:33:59 crc kubenswrapper[4591]: I1203 12:33:59.890738 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:33:59 crc kubenswrapper[4591]: E1203 12:33:59.891085 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:34:04 crc kubenswrapper[4591]: E1203 12:34:04.891994 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:34:11 crc kubenswrapper[4591]: I1203 12:34:11.890849 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:34:11 crc kubenswrapper[4591]: E1203 12:34:11.891485 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:34:15 crc kubenswrapper[4591]: E1203 12:34:15.891961 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:34:25 crc kubenswrapper[4591]: I1203 12:34:25.891011 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:34:26 crc kubenswrapper[4591]: I1203 12:34:26.836833 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"1d5a89c7a12db5e1168ad0d117dc3c7980e276a04d5ca9cb2411eeb281205cc0"} Dec 03 12:34:26 crc kubenswrapper[4591]: E1203 12:34:26.891673 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:34:38 crc kubenswrapper[4591]: E1203 12:34:38.892197 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:34:51 crc kubenswrapper[4591]: E1203 12:34:51.891661 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:35:04 
crc kubenswrapper[4591]: E1203 12:35:04.891804 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:35:18 crc kubenswrapper[4591]: E1203 12:35:18.891985 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:35:32 crc kubenswrapper[4591]: E1203 12:35:32.895201 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:35:43 crc kubenswrapper[4591]: E1203 12:35:43.891739 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.824320 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zjlf5/must-gather-xvdp6"] Dec 03 12:35:46 crc kubenswrapper[4591]: E1203 12:35:46.825889 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="registry-server" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.825969 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="registry-server" Dec 03 12:35:46 crc kubenswrapper[4591]: E1203 12:35:46.826035 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="extract-utilities" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.826133 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="extract-utilities" Dec 03 12:35:46 crc kubenswrapper[4591]: E1203 12:35:46.826221 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="extract-content" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.826279 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="extract-content" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.826620 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="e427c75d-bc99-4d64-94a6-df697e914784" containerName="registry-server" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.833292 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.838790 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zjlf5"/"kube-root-ca.crt" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.839093 4591 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zjlf5"/"openshift-service-ca.crt" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.845586 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zjlf5/must-gather-xvdp6"] Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.917178 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbkhk\" (UniqueName: \"kubernetes.io/projected/8e19d18f-b0ca-4ec9-a95e-153344349146-kube-api-access-xbkhk\") pod \"must-gather-xvdp6\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:46 crc kubenswrapper[4591]: I1203 12:35:46.917220 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8e19d18f-b0ca-4ec9-a95e-153344349146-must-gather-output\") pod \"must-gather-xvdp6\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:47 crc kubenswrapper[4591]: I1203 12:35:47.018447 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbkhk\" (UniqueName: \"kubernetes.io/projected/8e19d18f-b0ca-4ec9-a95e-153344349146-kube-api-access-xbkhk\") pod \"must-gather-xvdp6\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:47 crc kubenswrapper[4591]: I1203 12:35:47.018481 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8e19d18f-b0ca-4ec9-a95e-153344349146-must-gather-output\") pod \"must-gather-xvdp6\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:47 crc kubenswrapper[4591]: I1203 12:35:47.019580 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8e19d18f-b0ca-4ec9-a95e-153344349146-must-gather-output\") pod \"must-gather-xvdp6\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:47 crc kubenswrapper[4591]: I1203 12:35:47.033503 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbkhk\" (UniqueName: \"kubernetes.io/projected/8e19d18f-b0ca-4ec9-a95e-153344349146-kube-api-access-xbkhk\") pod \"must-gather-xvdp6\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:47 crc kubenswrapper[4591]: I1203 12:35:47.162223 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:35:47 crc kubenswrapper[4591]: I1203 12:35:47.516458 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zjlf5/must-gather-xvdp6"] Dec 03 12:35:48 crc kubenswrapper[4591]: I1203 12:35:48.274949 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" event={"ID":"8e19d18f-b0ca-4ec9-a95e-153344349146","Type":"ContainerStarted","Data":"cf5782814b96123c811f56977ad7635640af2955fc2c21e2d12b6ddeda66ac66"} Dec 03 12:35:52 crc kubenswrapper[4591]: I1203 12:35:52.303279 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" event={"ID":"8e19d18f-b0ca-4ec9-a95e-153344349146","Type":"ContainerStarted","Data":"5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131"} Dec 03 12:35:53 crc kubenswrapper[4591]: I1203 12:35:53.310351 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" event={"ID":"8e19d18f-b0ca-4ec9-a95e-153344349146","Type":"ContainerStarted","Data":"1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509"} Dec 03 12:35:53 crc kubenswrapper[4591]: I1203 12:35:53.323540 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" podStartSLOduration=2.779113389 podStartE2EDuration="7.323526973s" podCreationTimestamp="2025-12-03 12:35:46 +0000 UTC" firstStartedPulling="2025-12-03 12:35:47.518582247 +0000 UTC m=+1844.945622016" lastFinishedPulling="2025-12-03 12:35:52.06299583 +0000 UTC m=+1849.490035600" observedRunningTime="2025-12-03 12:35:53.320512695 +0000 UTC m=+1850.747552465" watchObservedRunningTime="2025-12-03 12:35:53.323526973 +0000 UTC m=+1850.750566744" Dec 03 12:35:58 crc kubenswrapper[4591]: E1203 12:35:58.891537 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:36:11 crc kubenswrapper[4591]: I1203 12:36:11.892859 4591 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:36:21 crc kubenswrapper[4591]: I1203 12:36:21.129630 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-7ftqp_a0d1e9b5-e785-4c6b-ad37-a9f0dc94e248/control-plane-machine-set-operator/0.log" Dec 03 12:36:21 crc kubenswrapper[4591]: I1203 12:36:21.227561 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-czcvh_f46fb0dc-2855-48f1-b744-1b5cc24f00e9/kube-rbac-proxy/0.log" Dec 03 12:36:21 crc kubenswrapper[4591]: I1203 12:36:21.249115 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-czcvh_f46fb0dc-2855-48f1-b744-1b5cc24f00e9/machine-api-operator/0.log" Dec 03 12:36:25 crc kubenswrapper[4591]: I1203 12:36:25.299554 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Dec 03 12:36:25 crc kubenswrapper[4591]: I1203 12:36:25.300574 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:36:29 crc kubenswrapper[4591]: I1203 12:36:29.263137 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-ncpgb_b7cb056f-6d6d-4e52-adcc-10c051df9400/cert-manager-controller/0.log" Dec 03 12:36:29 crc kubenswrapper[4591]: I1203 12:36:29.369219 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-wt6ln_551b74b5-4ba8-4b29-b23b-fd955fbe0819/cert-manager-cainjector/0.log" Dec 03 12:36:29 crc kubenswrapper[4591]: I1203 12:36:29.383896 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-h279r_72176880-d1d6-48ed-9099-15650cd27b96/cert-manager-webhook/0.log" Dec 03 12:36:38 crc kubenswrapper[4591]: I1203 12:36:38.013934 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-phtsg_d50528f6-5b01-45a7-8cc7-3bc7044a3769/nmstate-console-plugin/0.log" Dec 03 12:36:38 crc kubenswrapper[4591]: I1203 12:36:38.150358 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-542md_64362553-8965-4c23-82f4-fddf7acbb589/nmstate-handler/0.log" Dec 03 12:36:38 crc kubenswrapper[4591]: I1203 12:36:38.171776 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-kjjft_5f031f8c-04a6-4523-9f1a-a70e06900dc1/kube-rbac-proxy/0.log" Dec 03 12:36:38 crc kubenswrapper[4591]: I1203 12:36:38.213038 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-kjjft_5f031f8c-04a6-4523-9f1a-a70e06900dc1/nmstate-metrics/0.log" Dec 03 12:36:38 crc kubenswrapper[4591]: I1203 12:36:38.329440 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-ffk7c_e44b18b0-0553-438b-b325-1d86e705999e/nmstate-operator/0.log" Dec 03 12:36:38 crc kubenswrapper[4591]: I1203 12:36:38.353222 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-kkdq9_c6152ed7-d2a1-47dd-a9dc-c208f72b7f0c/nmstate-webhook/0.log" Dec 03 12:36:46 crc kubenswrapper[4591]: I1203 12:36:46.844455 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6d78b5768d-sb2l8_5d7a891c-0e61-49b3-856e-0a35b5d53b03/kube-rbac-proxy/0.log" Dec 03 12:36:46 crc kubenswrapper[4591]: I1203 12:36:46.888454 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6d78b5768d-sb2l8_5d7a891c-0e61-49b3-856e-0a35b5d53b03/manager/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.299220 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.299661 4591 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.489167 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_cluster-logging-operator-ff9846bd-95tbr_a1a26c4d-0460-4cb8-8f10-6d46fd68bbf2/cluster-logging-operator/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.549261 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_collector-fz4zs_cca898ab-1b71-41a7-a104-9b7e584166d2/collector/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.635688 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-compactor-0_43b7f0c2-48eb-4736-98fe-bac9553de422/loki-compactor/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.687668 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-distributor-76cc67bf56-9b7f9_ca5e0135-4162-41eb-8bb0-4bbe375f13dc/loki-distributor/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.789464 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-5f86f97d54-c4dmm_480391ab-2b77-43a4-96a8-c821e57eb922/gateway/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.792689 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-5f86f97d54-c4dmm_480391ab-2b77-43a4-96a8-c821e57eb922/opa/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.883904 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-5f86f97d54-cwgrl_8395cf21-0ee1-4760-8529-0b7be4c16b92/gateway/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.932570 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-5f86f97d54-cwgrl_8395cf21-0ee1-4760-8529-0b7be4c16b92/opa/0.log" Dec 03 12:36:55 crc kubenswrapper[4591]: I1203 12:36:55.979037 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-index-gateway-0_64b2bf76-1240-4b84-8881-c41e73fbe414/loki-index-gateway/0.log" Dec 03 12:36:56 crc kubenswrapper[4591]: I1203 12:36:56.073979 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-ingester-0_b519e20b-91ed-48f1-b3e0-9840d35ab56e/loki-ingester/0.log" Dec 03 12:36:56 crc kubenswrapper[4591]: I1203 12:36:56.128049 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-querier-5895d59bb8-zk2rp_58cfe77c-1a62-4d74-b40b-222ca7031658/loki-querier/0.log" Dec 03 12:36:56 crc kubenswrapper[4591]: I1203 12:36:56.208307 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-query-frontend-84558f7c9f-fv8vs_836410c1-1063-427d-a270-dcd93f89dfd5/loki-query-frontend/0.log" Dec 03 12:37:05 crc kubenswrapper[4591]: I1203 12:37:05.600605 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-t29t8_102945b3-6f0d-43b1-b347-7a7fd637ce88/kube-rbac-proxy/0.log" Dec 03 12:37:05 crc kubenswrapper[4591]: I1203 12:37:05.725320 4591 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-f8648f98b-t29t8_102945b3-6f0d-43b1-b347-7a7fd637ce88/controller/0.log" Dec 03 12:37:05 crc kubenswrapper[4591]: I1203 12:37:05.769775 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-frr-files/0.log" Dec 03 12:37:05 crc kubenswrapper[4591]: I1203 12:37:05.878596 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-reloader/0.log" Dec 03 12:37:05 crc kubenswrapper[4591]: I1203 12:37:05.881282 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-metrics/0.log" Dec 03 12:37:05 crc kubenswrapper[4591]: I1203 12:37:05.901832 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-frr-files/0.log" Dec 03 12:37:05 crc kubenswrapper[4591]: I1203 12:37:05.922797 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-reloader/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.033570 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-frr-files/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.060717 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-reloader/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.066400 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-metrics/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.096163 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-metrics/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.167449 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-frr-files/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.195632 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-reloader/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.196913 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/cp-metrics/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.227373 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/controller/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.328536 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/kube-rbac-proxy/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.351457 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/frr/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.357553 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/frr-metrics/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 
12:37:06.393352 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/kube-rbac-proxy-frr/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.465873 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-czlhw_9bf70e0c-4a12-4965-85c9-212b85622dc0/reloader/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.508973 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-r82b4_09e7c06e-c8f7-42c8-81c1-dda14b5d609e/frr-k8s-webhook-server/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.610866 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-56654d8dc6-cw82v_89a2cfa8-cba7-4315-8f4a-dc73986ea26b/manager/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.651531 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-c9cc967c7-kgksn_35810136-753c-4212-ba53-251ecd811b4a/webhook-server/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.730658 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-sk4bx_18d17b40-9915-4646-a939-0c23134df87e/kube-rbac-proxy/0.log" Dec 03 12:37:06 crc kubenswrapper[4591]: I1203 12:37:06.951799 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-sk4bx_18d17b40-9915-4646-a939-0c23134df87e/speaker/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.122984 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l_9aab9ae9-de15-483b-96d1-1838b473c557/util/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.243057 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l_9aab9ae9-de15-483b-96d1-1838b473c557/util/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.247574 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l_9aab9ae9-de15-483b-96d1-1838b473c557/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.254236 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l_9aab9ae9-de15-483b-96d1-1838b473c557/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.350405 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l_9aab9ae9-de15-483b-96d1-1838b473c557/util/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.368046 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l_9aab9ae9-de15-483b-96d1-1838b473c557/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.397244 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb82m87l_9aab9ae9-de15-483b-96d1-1838b473c557/extract/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.477718 4591 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2_fa097088-c6b3-427e-a0fb-def2a61e6640/util/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.599156 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2_fa097088-c6b3-427e-a0fb-def2a61e6640/util/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.603508 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2_fa097088-c6b3-427e-a0fb-def2a61e6640/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.606977 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2_fa097088-c6b3-427e-a0fb-def2a61e6640/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.733429 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2_fa097088-c6b3-427e-a0fb-def2a61e6640/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.739533 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2_fa097088-c6b3-427e-a0fb-def2a61e6640/extract/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.739888 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f4w4f2_fa097088-c6b3-427e-a0fb-def2a61e6640/util/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.844682 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt_27e3cdd3-0d25-4358-9c11-52196145226d/util/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.970844 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt_27e3cdd3-0d25-4358-9c11-52196145226d/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.971167 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt_27e3cdd3-0d25-4358-9c11-52196145226d/pull/0.log" Dec 03 12:37:15 crc kubenswrapper[4591]: I1203 12:37:15.983056 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt_27e3cdd3-0d25-4358-9c11-52196145226d/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.097073 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt_27e3cdd3-0d25-4358-9c11-52196145226d/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.104407 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt_27e3cdd3-0d25-4358-9c11-52196145226d/extract/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.104498 4591 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92104f5pt_27e3cdd3-0d25-4358-9c11-52196145226d/pull/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.205314 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw_6c6032e3-7ac5-4f93-97df-c35b975f6d17/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.330574 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw_6c6032e3-7ac5-4f93-97df-c35b975f6d17/pull/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.331495 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw_6c6032e3-7ac5-4f93-97df-c35b975f6d17/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.349384 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw_6c6032e3-7ac5-4f93-97df-c35b975f6d17/pull/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.465571 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw_6c6032e3-7ac5-4f93-97df-c35b975f6d17/pull/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.467233 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw_6c6032e3-7ac5-4f93-97df-c35b975f6d17/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.467861 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463fp6rjw_6c6032e3-7ac5-4f93-97df-c35b975f6d17/extract/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.571766 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq_111df2c3-a57c-412d-b593-390f211cc05c/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.705270 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq_111df2c3-a57c-412d-b593-390f211cc05c/pull/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.709010 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq_111df2c3-a57c-412d-b593-390f211cc05c/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.712085 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq_111df2c3-a57c-412d-b593-390f211cc05c/pull/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.845916 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq_111df2c3-a57c-412d-b593-390f211cc05c/util/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.850208 4591 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq_111df2c3-a57c-412d-b593-390f211cc05c/pull/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.869458 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f839twhq_111df2c3-a57c-412d-b593-390f211cc05c/extract/0.log" Dec 03 12:37:16 crc kubenswrapper[4591]: I1203 12:37:16.958956 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tnlm8_10dbb454-fa06-48e9-b129-d0b68864515f/extract-utilities/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.070841 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tnlm8_10dbb454-fa06-48e9-b129-d0b68864515f/extract-utilities/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.074540 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tnlm8_10dbb454-fa06-48e9-b129-d0b68864515f/extract-content/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.075102 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tnlm8_10dbb454-fa06-48e9-b129-d0b68864515f/extract-content/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.201715 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tnlm8_10dbb454-fa06-48e9-b129-d0b68864515f/extract-utilities/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.202923 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tnlm8_10dbb454-fa06-48e9-b129-d0b68864515f/extract-content/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.374282 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pkxmh_495b88b0-ab9f-45d4-b257-da87febda2bb/extract-utilities/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.500860 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tnlm8_10dbb454-fa06-48e9-b129-d0b68864515f/registry-server/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.528981 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pkxmh_495b88b0-ab9f-45d4-b257-da87febda2bb/extract-utilities/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.537736 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pkxmh_495b88b0-ab9f-45d4-b257-da87febda2bb/extract-content/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.558615 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pkxmh_495b88b0-ab9f-45d4-b257-da87febda2bb/extract-content/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.691616 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pkxmh_495b88b0-ab9f-45d4-b257-da87febda2bb/extract-content/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.700209 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pkxmh_495b88b0-ab9f-45d4-b257-da87febda2bb/extract-utilities/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.858157 4591 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-lrx9r_36715c77-dc19-46de-b452-6f43fef4b296/marketplace-operator/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.923168 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r27xd_3b4b9cef-ccab-403d-9dad-5d04a216bc01/extract-utilities/0.log" Dec 03 12:37:17 crc kubenswrapper[4591]: I1203 12:37:17.943900 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pkxmh_495b88b0-ab9f-45d4-b257-da87febda2bb/registry-server/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.020465 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r27xd_3b4b9cef-ccab-403d-9dad-5d04a216bc01/extract-utilities/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.025612 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r27xd_3b4b9cef-ccab-403d-9dad-5d04a216bc01/extract-content/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.050180 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r27xd_3b4b9cef-ccab-403d-9dad-5d04a216bc01/extract-content/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.149338 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r27xd_3b4b9cef-ccab-403d-9dad-5d04a216bc01/extract-utilities/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.162586 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r27xd_3b4b9cef-ccab-403d-9dad-5d04a216bc01/extract-content/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.216803 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r27xd_3b4b9cef-ccab-403d-9dad-5d04a216bc01/registry-server/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.227185 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q2fg9_afa3d606-fc50-40b2-9b33-1fcc258faf3d/extract-utilities/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.343439 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q2fg9_afa3d606-fc50-40b2-9b33-1fcc258faf3d/extract-content/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.343867 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q2fg9_afa3d606-fc50-40b2-9b33-1fcc258faf3d/extract-content/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.344610 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q2fg9_afa3d606-fc50-40b2-9b33-1fcc258faf3d/extract-utilities/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.452102 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q2fg9_afa3d606-fc50-40b2-9b33-1fcc258faf3d/extract-utilities/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.472218 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q2fg9_afa3d606-fc50-40b2-9b33-1fcc258faf3d/extract-content/0.log" Dec 03 12:37:18 crc kubenswrapper[4591]: I1203 12:37:18.654964 4591 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q2fg9_afa3d606-fc50-40b2-9b33-1fcc258faf3d/registry-server/0.log" Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.300195 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.300579 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.300620 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.301217 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1d5a89c7a12db5e1168ad0d117dc3c7980e276a04d5ca9cb2411eeb281205cc0"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.301332 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://1d5a89c7a12db5e1168ad0d117dc3c7980e276a04d5ca9cb2411eeb281205cc0" gracePeriod=600 Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.858654 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="1d5a89c7a12db5e1168ad0d117dc3c7980e276a04d5ca9cb2411eeb281205cc0" exitCode=0 Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.858758 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"1d5a89c7a12db5e1168ad0d117dc3c7980e276a04d5ca9cb2411eeb281205cc0"} Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.858921 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerStarted","Data":"03886246aa34fbd0ea08c4d11f4512fd367a31ef76fe41b6e64d6fee323786aa"} Dec 03 12:37:25 crc kubenswrapper[4591]: I1203 12:37:25.858945 4591 scope.go:117] "RemoveContainer" containerID="32d9b0b6f49b151bf010cdf616f92d05800563dac8bee6ae4a80205a76a35475" Dec 03 12:37:26 crc kubenswrapper[4591]: I1203 12:37:26.058296 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-c4cdg_9ddbe8c9-be6f-4bae-ac8e-18800b197f3b/prometheus-operator/0.log" Dec 03 12:37:26 crc kubenswrapper[4591]: I1203 12:37:26.146646 4591 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-56547b7db6-nm58w_4a7721a3-0074-41f9-b794-6f739872fccf/prometheus-operator-admission-webhook/0.log" Dec 03 12:37:26 crc kubenswrapper[4591]: I1203 12:37:26.201285 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-56547b7db6-wrjpg_ca94d274-d7be-4190-9703-65e47e03b0a5/prometheus-operator-admission-webhook/0.log" Dec 03 12:37:26 crc kubenswrapper[4591]: I1203 12:37:26.261564 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-rdtt7_98f8ce4f-b88f-4051-89c7-7c2dbfbcbe2a/operator/0.log" Dec 03 12:37:26 crc kubenswrapper[4591]: I1203 12:37:26.323316 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-b7b9r_2c9360e0-cdca-4f3b-895b-27de95ec9f40/perses-operator/0.log" Dec 03 12:37:34 crc kubenswrapper[4591]: I1203 12:37:34.268911 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6d78b5768d-sb2l8_5d7a891c-0e61-49b3-856e-0a35b5d53b03/kube-rbac-proxy/0.log" Dec 03 12:37:34 crc kubenswrapper[4591]: I1203 12:37:34.319277 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6d78b5768d-sb2l8_5d7a891c-0e61-49b3-856e-0a35b5d53b03/manager/0.log" Dec 03 12:38:11 crc kubenswrapper[4591]: E1203 12:38:11.899419 4591 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:38:11 crc kubenswrapper[4591]: E1203 12:38:11.899897 4591 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" image="38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:38:11 crc kubenswrapper[4591]: E1203 12:38:11.900009 4591 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6p6rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-v5fln_openstack-operators(9b97239d-8d0f-443b-bc87-4c56b518bab3): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \"http://38.102.83.13:5001/v2/\": dial tcp 38.102.83.13:5001: i/o timeout" logger="UnhandledError" Dec 03 12:38:11 crc kubenswrapper[4591]: E1203 12:38:11.901199 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.13:5001: Get \\\"http://38.102.83.13:5001/v2/\\\": dial tcp 38.102.83.13:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:38:17 crc kubenswrapper[4591]: I1203 12:38:17.137774 4591 generic.go:334] "Generic (PLEG): container finished" podID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerID="5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131" exitCode=0 Dec 03 12:38:17 crc kubenswrapper[4591]: I1203 12:38:17.138178 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" event={"ID":"8e19d18f-b0ca-4ec9-a95e-153344349146","Type":"ContainerDied","Data":"5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131"} Dec 03 12:38:17 crc kubenswrapper[4591]: I1203 12:38:17.138600 4591 scope.go:117] "RemoveContainer" containerID="5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131" Dec 03 12:38:18 crc kubenswrapper[4591]: I1203 12:38:18.034933 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zjlf5_must-gather-xvdp6_8e19d18f-b0ca-4ec9-a95e-153344349146/gather/0.log" Dec 03 12:38:24 crc kubenswrapper[4591]: I1203 12:38:24.682559 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zjlf5/must-gather-xvdp6"] Dec 03 12:38:24 crc 
kubenswrapper[4591]: I1203 12:38:24.683153 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerName="copy" containerID="cri-o://1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509" gracePeriod=2 Dec 03 12:38:24 crc kubenswrapper[4591]: I1203 12:38:24.687508 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zjlf5/must-gather-xvdp6"] Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.037000 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zjlf5_must-gather-xvdp6_8e19d18f-b0ca-4ec9-a95e-153344349146/copy/0.log" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.037502 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.122745 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbkhk\" (UniqueName: \"kubernetes.io/projected/8e19d18f-b0ca-4ec9-a95e-153344349146-kube-api-access-xbkhk\") pod \"8e19d18f-b0ca-4ec9-a95e-153344349146\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.122783 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8e19d18f-b0ca-4ec9-a95e-153344349146-must-gather-output\") pod \"8e19d18f-b0ca-4ec9-a95e-153344349146\" (UID: \"8e19d18f-b0ca-4ec9-a95e-153344349146\") " Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.127502 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e19d18f-b0ca-4ec9-a95e-153344349146-kube-api-access-xbkhk" (OuterVolumeSpecName: "kube-api-access-xbkhk") pod "8e19d18f-b0ca-4ec9-a95e-153344349146" (UID: "8e19d18f-b0ca-4ec9-a95e-153344349146"). InnerVolumeSpecName "kube-api-access-xbkhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.176938 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e19d18f-b0ca-4ec9-a95e-153344349146-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "8e19d18f-b0ca-4ec9-a95e-153344349146" (UID: "8e19d18f-b0ca-4ec9-a95e-153344349146"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.191174 4591 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zjlf5_must-gather-xvdp6_8e19d18f-b0ca-4ec9-a95e-153344349146/copy/0.log" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.191631 4591 generic.go:334] "Generic (PLEG): container finished" podID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerID="1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509" exitCode=143 Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.191686 4591 scope.go:117] "RemoveContainer" containerID="1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.191710 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zjlf5/must-gather-xvdp6" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.207443 4591 scope.go:117] "RemoveContainer" containerID="5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.225327 4591 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8e19d18f-b0ca-4ec9-a95e-153344349146-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.225357 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbkhk\" (UniqueName: \"kubernetes.io/projected/8e19d18f-b0ca-4ec9-a95e-153344349146-kube-api-access-xbkhk\") on node \"crc\" DevicePath \"\"" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.251121 4591 scope.go:117] "RemoveContainer" containerID="1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509" Dec 03 12:38:25 crc kubenswrapper[4591]: E1203 12:38:25.251424 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509\": container with ID starting with 1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509 not found: ID does not exist" containerID="1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.251455 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509"} err="failed to get container status \"1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509\": rpc error: code = NotFound desc = could not find container \"1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509\": container with ID starting with 1cd8257c2ede0b3aacf84dba39f6372b7492daae5125edd5ebe6c17dbbc47509 not found: ID does not exist" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.251476 4591 scope.go:117] "RemoveContainer" containerID="5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131" Dec 03 12:38:25 crc kubenswrapper[4591]: E1203 12:38:25.251719 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131\": container with ID starting with 5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131 not found: ID does not exist" containerID="5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131" Dec 03 12:38:25 crc kubenswrapper[4591]: I1203 12:38:25.251763 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131"} err="failed to get container status \"5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131\": rpc error: code = NotFound desc = could not find container \"5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131\": container with ID starting with 5b217cd51a948664539de1351f40ffb19162079aabc2e15c3304331551b0b131 not found: ID does not exist" Dec 03 12:38:26 crc kubenswrapper[4591]: E1203 12:38:26.891949 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:38:26 crc kubenswrapper[4591]: I1203 12:38:26.897259 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" path="/var/lib/kubelet/pods/8e19d18f-b0ca-4ec9-a95e-153344349146/volumes" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.149464 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r6lqj"] Dec 03 12:38:27 crc kubenswrapper[4591]: E1203 12:38:27.149722 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerName="gather" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.149738 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerName="gather" Dec 03 12:38:27 crc kubenswrapper[4591]: E1203 12:38:27.149753 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerName="copy" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.149758 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerName="copy" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.149865 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerName="gather" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.149884 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e19d18f-b0ca-4ec9-a95e-153344349146" containerName="copy" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.150798 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.157024 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r6lqj"] Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.251412 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-utilities\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.251473 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8ss7\" (UniqueName: \"kubernetes.io/projected/0348b2c3-960f-452b-93f5-5966cb677014-kube-api-access-j8ss7\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.251660 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-catalog-content\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.352633 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-catalog-content\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.352712 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-utilities\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.352751 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8ss7\" (UniqueName: \"kubernetes.io/projected/0348b2c3-960f-452b-93f5-5966cb677014-kube-api-access-j8ss7\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.353043 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-catalog-content\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.353172 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-utilities\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.370164 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-j8ss7\" (UniqueName: \"kubernetes.io/projected/0348b2c3-960f-452b-93f5-5966cb677014-kube-api-access-j8ss7\") pod \"redhat-operators-r6lqj\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.463455 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:27 crc kubenswrapper[4591]: I1203 12:38:27.850454 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r6lqj"] Dec 03 12:38:28 crc kubenswrapper[4591]: I1203 12:38:28.217611 4591 generic.go:334] "Generic (PLEG): container finished" podID="0348b2c3-960f-452b-93f5-5966cb677014" containerID="929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162" exitCode=0 Dec 03 12:38:28 crc kubenswrapper[4591]: I1203 12:38:28.217658 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6lqj" event={"ID":"0348b2c3-960f-452b-93f5-5966cb677014","Type":"ContainerDied","Data":"929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162"} Dec 03 12:38:28 crc kubenswrapper[4591]: I1203 12:38:28.217697 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6lqj" event={"ID":"0348b2c3-960f-452b-93f5-5966cb677014","Type":"ContainerStarted","Data":"9cae39a4d07b82db89a45880582fbe7c440885be90756f21e303ee6879ac354b"} Dec 03 12:38:29 crc kubenswrapper[4591]: I1203 12:38:29.226988 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6lqj" event={"ID":"0348b2c3-960f-452b-93f5-5966cb677014","Type":"ContainerStarted","Data":"d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05"} Dec 03 12:38:30 crc kubenswrapper[4591]: I1203 12:38:30.235289 4591 generic.go:334] "Generic (PLEG): container finished" podID="0348b2c3-960f-452b-93f5-5966cb677014" containerID="d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05" exitCode=0 Dec 03 12:38:30 crc kubenswrapper[4591]: I1203 12:38:30.235348 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6lqj" event={"ID":"0348b2c3-960f-452b-93f5-5966cb677014","Type":"ContainerDied","Data":"d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05"} Dec 03 12:38:31 crc kubenswrapper[4591]: I1203 12:38:31.242715 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6lqj" event={"ID":"0348b2c3-960f-452b-93f5-5966cb677014","Type":"ContainerStarted","Data":"1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507"} Dec 03 12:38:31 crc kubenswrapper[4591]: I1203 12:38:31.255886 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r6lqj" podStartSLOduration=1.671968648 podStartE2EDuration="4.255867298s" podCreationTimestamp="2025-12-03 12:38:27 +0000 UTC" firstStartedPulling="2025-12-03 12:38:28.219364769 +0000 UTC m=+2005.646404539" lastFinishedPulling="2025-12-03 12:38:30.803263419 +0000 UTC m=+2008.230303189" observedRunningTime="2025-12-03 12:38:31.254697548 +0000 UTC m=+2008.681737318" watchObservedRunningTime="2025-12-03 12:38:31.255867298 +0000 UTC m=+2008.682907068" Dec 03 12:38:37 crc kubenswrapper[4591]: I1203 12:38:37.464389 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r6lqj" 
Dec 03 12:38:37 crc kubenswrapper[4591]: I1203 12:38:37.465629 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:37 crc kubenswrapper[4591]: I1203 12:38:37.496780 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:38 crc kubenswrapper[4591]: I1203 12:38:38.309413 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:38 crc kubenswrapper[4591]: I1203 12:38:38.340012 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r6lqj"] Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.287962 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r6lqj" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="registry-server" containerID="cri-o://1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507" gracePeriod=2 Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.608992 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.739323 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8ss7\" (UniqueName: \"kubernetes.io/projected/0348b2c3-960f-452b-93f5-5966cb677014-kube-api-access-j8ss7\") pod \"0348b2c3-960f-452b-93f5-5966cb677014\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.739391 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-catalog-content\") pod \"0348b2c3-960f-452b-93f5-5966cb677014\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.739568 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-utilities\") pod \"0348b2c3-960f-452b-93f5-5966cb677014\" (UID: \"0348b2c3-960f-452b-93f5-5966cb677014\") " Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.740145 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-utilities" (OuterVolumeSpecName: "utilities") pod "0348b2c3-960f-452b-93f5-5966cb677014" (UID: "0348b2c3-960f-452b-93f5-5966cb677014"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.744385 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0348b2c3-960f-452b-93f5-5966cb677014-kube-api-access-j8ss7" (OuterVolumeSpecName: "kube-api-access-j8ss7") pod "0348b2c3-960f-452b-93f5-5966cb677014" (UID: "0348b2c3-960f-452b-93f5-5966cb677014"). InnerVolumeSpecName "kube-api-access-j8ss7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.811859 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0348b2c3-960f-452b-93f5-5966cb677014" (UID: "0348b2c3-960f-452b-93f5-5966cb677014"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.840985 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.841016 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8ss7\" (UniqueName: \"kubernetes.io/projected/0348b2c3-960f-452b-93f5-5966cb677014-kube-api-access-j8ss7\") on node \"crc\" DevicePath \"\"" Dec 03 12:38:40 crc kubenswrapper[4591]: I1203 12:38:40.841028 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0348b2c3-960f-452b-93f5-5966cb677014-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.295255 4591 generic.go:334] "Generic (PLEG): container finished" podID="0348b2c3-960f-452b-93f5-5966cb677014" containerID="1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507" exitCode=0 Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.295361 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r6lqj" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.295358 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6lqj" event={"ID":"0348b2c3-960f-452b-93f5-5966cb677014","Type":"ContainerDied","Data":"1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507"} Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.295578 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r6lqj" event={"ID":"0348b2c3-960f-452b-93f5-5966cb677014","Type":"ContainerDied","Data":"9cae39a4d07b82db89a45880582fbe7c440885be90756f21e303ee6879ac354b"} Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.295597 4591 scope.go:117] "RemoveContainer" containerID="1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.309510 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r6lqj"] Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.316866 4591 scope.go:117] "RemoveContainer" containerID="d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.318035 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r6lqj"] Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.330973 4591 scope.go:117] "RemoveContainer" containerID="929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.349310 4591 scope.go:117] "RemoveContainer" containerID="1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507" Dec 03 12:38:41 crc kubenswrapper[4591]: E1203 12:38:41.349695 4591 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507\": container with ID starting with 1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507 not found: ID does not exist" containerID="1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.349734 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507"} err="failed to get container status \"1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507\": rpc error: code = NotFound desc = could not find container \"1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507\": container with ID starting with 1761957c5ecbcfb48f78b98d1a38d7407f526c8720fa615c50f63dcc9ecfb507 not found: ID does not exist" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.349760 4591 scope.go:117] "RemoveContainer" containerID="d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05" Dec 03 12:38:41 crc kubenswrapper[4591]: E1203 12:38:41.350000 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05\": container with ID starting with d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05 not found: ID does not exist" containerID="d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.350027 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05"} err="failed to get container status \"d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05\": rpc error: code = NotFound desc = could not find container \"d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05\": container with ID starting with d2db695aaaa8d2bc3e09260104392e5c0400a25978fcd3b10a890a29e586de05 not found: ID does not exist" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.350045 4591 scope.go:117] "RemoveContainer" containerID="929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162" Dec 03 12:38:41 crc kubenswrapper[4591]: E1203 12:38:41.350278 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162\": container with ID starting with 929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162 not found: ID does not exist" containerID="929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162" Dec 03 12:38:41 crc kubenswrapper[4591]: I1203 12:38:41.350309 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162"} err="failed to get container status \"929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162\": rpc error: code = NotFound desc = could not find container \"929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162\": container with ID starting with 929084406bab415a96755cfd1b6eadb90640fcf6e44ca8717858e9ad6918a162 not found: ID does not exist" Dec 03 12:38:41 crc kubenswrapper[4591]: E1203 12:38:41.891688 4591 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:38:42 crc kubenswrapper[4591]: I1203 12:38:42.897485 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0348b2c3-960f-452b-93f5-5966cb677014" path="/var/lib/kubelet/pods/0348b2c3-960f-452b-93f5-5966cb677014/volumes" Dec 03 12:38:55 crc kubenswrapper[4591]: E1203 12:38:55.891939 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.818977 4591 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-68knk"] Dec 03 12:39:00 crc kubenswrapper[4591]: E1203 12:39:00.819761 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="registry-server" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.819784 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="registry-server" Dec 03 12:39:00 crc kubenswrapper[4591]: E1203 12:39:00.819817 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="extract-content" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.819824 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="extract-content" Dec 03 12:39:00 crc kubenswrapper[4591]: E1203 12:39:00.819839 4591 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="extract-utilities" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.819849 4591 state_mem.go:107] "Deleted CPUSet assignment" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="extract-utilities" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.820013 4591 memory_manager.go:354] "RemoveStaleState removing state" podUID="0348b2c3-960f-452b-93f5-5966cb677014" containerName="registry-server" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.820990 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.827980 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-68knk"] Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.911497 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-catalog-content\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.911549 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w9zs\" (UniqueName: \"kubernetes.io/projected/ebc1bb1a-b115-43ff-a381-e49465f3df53-kube-api-access-7w9zs\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:00 crc kubenswrapper[4591]: I1203 12:39:00.911828 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-utilities\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.013148 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w9zs\" (UniqueName: \"kubernetes.io/projected/ebc1bb1a-b115-43ff-a381-e49465f3df53-kube-api-access-7w9zs\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.013263 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-utilities\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.013318 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-catalog-content\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.013632 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-utilities\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.013652 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-catalog-content\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.020889 4591 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sswph"] Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.022212 4591 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.031719 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w9zs\" (UniqueName: \"kubernetes.io/projected/ebc1bb1a-b115-43ff-a381-e49465f3df53-kube-api-access-7w9zs\") pod \"community-operators-68knk\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.038463 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sswph"] Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.116382 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-utilities\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.116733 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-catalog-content\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.117098 4591 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtcx6\" (UniqueName: \"kubernetes.io/projected/0728065b-632b-42ee-a802-5508cabf2293-kube-api-access-qtcx6\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.143651 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.218857 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-utilities\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.219327 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-utilities\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.219423 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-catalog-content\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.219517 4591 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtcx6\" (UniqueName: \"kubernetes.io/projected/0728065b-632b-42ee-a802-5508cabf2293-kube-api-access-qtcx6\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.219646 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-catalog-content\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.237849 4591 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtcx6\" (UniqueName: \"kubernetes.io/projected/0728065b-632b-42ee-a802-5508cabf2293-kube-api-access-qtcx6\") pod \"redhat-marketplace-sswph\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.354689 4591 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:01 crc kubenswrapper[4591]: I1203 12:39:01.552203 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-68knk"] Dec 03 12:39:01 crc kubenswrapper[4591]: W1203 12:39:01.559041 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebc1bb1a_b115_43ff_a381_e49465f3df53.slice/crio-4e0ad85da08282b509f93e9f4eb26a22051bf9421cafe86391cb876eaa0feb8a WatchSource:0}: Error finding container 4e0ad85da08282b509f93e9f4eb26a22051bf9421cafe86391cb876eaa0feb8a: Status 404 returned error can't find the container with id 4e0ad85da08282b509f93e9f4eb26a22051bf9421cafe86391cb876eaa0feb8a Dec 03 12:39:02 crc kubenswrapper[4591]: I1203 12:39:02.001233 4591 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sswph"] Dec 03 12:39:02 crc kubenswrapper[4591]: W1203 12:39:02.003446 4591 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0728065b_632b_42ee_a802_5508cabf2293.slice/crio-eb10151d689b590dc97d21dd9ecec0397094bcd849fc46e51db94c3624252f60 WatchSource:0}: Error finding container eb10151d689b590dc97d21dd9ecec0397094bcd849fc46e51db94c3624252f60: Status 404 returned error can't find the container with id eb10151d689b590dc97d21dd9ecec0397094bcd849fc46e51db94c3624252f60 Dec 03 12:39:02 crc kubenswrapper[4591]: I1203 12:39:02.413384 4591 generic.go:334] "Generic (PLEG): container finished" podID="0728065b-632b-42ee-a802-5508cabf2293" containerID="e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93" exitCode=0 Dec 03 12:39:02 crc kubenswrapper[4591]: I1203 12:39:02.413478 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sswph" event={"ID":"0728065b-632b-42ee-a802-5508cabf2293","Type":"ContainerDied","Data":"e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93"} Dec 03 12:39:02 crc kubenswrapper[4591]: I1203 12:39:02.413648 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sswph" event={"ID":"0728065b-632b-42ee-a802-5508cabf2293","Type":"ContainerStarted","Data":"eb10151d689b590dc97d21dd9ecec0397094bcd849fc46e51db94c3624252f60"} Dec 03 12:39:02 crc kubenswrapper[4591]: I1203 12:39:02.415686 4591 generic.go:334] "Generic (PLEG): container finished" podID="ebc1bb1a-b115-43ff-a381-e49465f3df53" containerID="3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6" exitCode=0 Dec 03 12:39:02 crc kubenswrapper[4591]: I1203 12:39:02.415732 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68knk" event={"ID":"ebc1bb1a-b115-43ff-a381-e49465f3df53","Type":"ContainerDied","Data":"3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6"} Dec 03 12:39:02 crc kubenswrapper[4591]: I1203 12:39:02.415750 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68knk" event={"ID":"ebc1bb1a-b115-43ff-a381-e49465f3df53","Type":"ContainerStarted","Data":"4e0ad85da08282b509f93e9f4eb26a22051bf9421cafe86391cb876eaa0feb8a"} Dec 03 12:39:03 crc kubenswrapper[4591]: I1203 12:39:03.423496 4591 generic.go:334] "Generic (PLEG): container finished" podID="0728065b-632b-42ee-a802-5508cabf2293" containerID="dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3" 
exitCode=0 Dec 03 12:39:03 crc kubenswrapper[4591]: I1203 12:39:03.423669 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sswph" event={"ID":"0728065b-632b-42ee-a802-5508cabf2293","Type":"ContainerDied","Data":"dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3"} Dec 03 12:39:03 crc kubenswrapper[4591]: I1203 12:39:03.426163 4591 generic.go:334] "Generic (PLEG): container finished" podID="ebc1bb1a-b115-43ff-a381-e49465f3df53" containerID="2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a" exitCode=0 Dec 03 12:39:03 crc kubenswrapper[4591]: I1203 12:39:03.426197 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68knk" event={"ID":"ebc1bb1a-b115-43ff-a381-e49465f3df53","Type":"ContainerDied","Data":"2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a"} Dec 03 12:39:04 crc kubenswrapper[4591]: I1203 12:39:04.434185 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68knk" event={"ID":"ebc1bb1a-b115-43ff-a381-e49465f3df53","Type":"ContainerStarted","Data":"f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d"} Dec 03 12:39:04 crc kubenswrapper[4591]: I1203 12:39:04.436871 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sswph" event={"ID":"0728065b-632b-42ee-a802-5508cabf2293","Type":"ContainerStarted","Data":"8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6"} Dec 03 12:39:04 crc kubenswrapper[4591]: I1203 12:39:04.457731 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-68knk" podStartSLOduration=2.901151531 podStartE2EDuration="4.457715151s" podCreationTimestamp="2025-12-03 12:39:00 +0000 UTC" firstStartedPulling="2025-12-03 12:39:02.417723753 +0000 UTC m=+2039.844763523" lastFinishedPulling="2025-12-03 12:39:03.974287373 +0000 UTC m=+2041.401327143" observedRunningTime="2025-12-03 12:39:04.451762909 +0000 UTC m=+2041.878802678" watchObservedRunningTime="2025-12-03 12:39:04.457715151 +0000 UTC m=+2041.884754921" Dec 03 12:39:04 crc kubenswrapper[4591]: I1203 12:39:04.468020 4591 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sswph" podStartSLOduration=1.906924716 podStartE2EDuration="3.468006292s" podCreationTimestamp="2025-12-03 12:39:01 +0000 UTC" firstStartedPulling="2025-12-03 12:39:02.41496229 +0000 UTC m=+2039.842002060" lastFinishedPulling="2025-12-03 12:39:03.976043866 +0000 UTC m=+2041.403083636" observedRunningTime="2025-12-03 12:39:04.464994359 +0000 UTC m=+2041.892034129" watchObservedRunningTime="2025-12-03 12:39:04.468006292 +0000 UTC m=+2041.895046062" Dec 03 12:39:08 crc kubenswrapper[4591]: E1203 12:39:08.892610 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.144578 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.144847 4591 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.201306 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.355520 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.355608 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.390376 4591 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.515838 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:11 crc kubenswrapper[4591]: I1203 12:39:11.515912 4591 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:15 crc kubenswrapper[4591]: I1203 12:39:15.813700 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-68knk"] Dec 03 12:39:15 crc kubenswrapper[4591]: I1203 12:39:15.814320 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-68knk" podUID="ebc1bb1a-b115-43ff-a381-e49465f3df53" containerName="registry-server" containerID="cri-o://f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d" gracePeriod=2 Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.013966 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sswph"] Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.014182 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sswph" podUID="0728065b-632b-42ee-a802-5508cabf2293" containerName="registry-server" containerID="cri-o://8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6" gracePeriod=2 Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.213804 4591 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.358335 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.372557 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-utilities\") pod \"ebc1bb1a-b115-43ff-a381-e49465f3df53\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.372787 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7w9zs\" (UniqueName: \"kubernetes.io/projected/ebc1bb1a-b115-43ff-a381-e49465f3df53-kube-api-access-7w9zs\") pod \"ebc1bb1a-b115-43ff-a381-e49465f3df53\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.373051 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-catalog-content\") pod \"ebc1bb1a-b115-43ff-a381-e49465f3df53\" (UID: \"ebc1bb1a-b115-43ff-a381-e49465f3df53\") " Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.373094 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-utilities\") pod \"0728065b-632b-42ee-a802-5508cabf2293\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.373333 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-utilities" (OuterVolumeSpecName: "utilities") pod "ebc1bb1a-b115-43ff-a381-e49465f3df53" (UID: "ebc1bb1a-b115-43ff-a381-e49465f3df53"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.373432 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.373823 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-utilities" (OuterVolumeSpecName: "utilities") pod "0728065b-632b-42ee-a802-5508cabf2293" (UID: "0728065b-632b-42ee-a802-5508cabf2293"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.377693 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebc1bb1a-b115-43ff-a381-e49465f3df53-kube-api-access-7w9zs" (OuterVolumeSpecName: "kube-api-access-7w9zs") pod "ebc1bb1a-b115-43ff-a381-e49465f3df53" (UID: "ebc1bb1a-b115-43ff-a381-e49465f3df53"). InnerVolumeSpecName "kube-api-access-7w9zs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.415481 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebc1bb1a-b115-43ff-a381-e49465f3df53" (UID: "ebc1bb1a-b115-43ff-a381-e49465f3df53"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.474272 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-catalog-content\") pod \"0728065b-632b-42ee-a802-5508cabf2293\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.474412 4591 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtcx6\" (UniqueName: \"kubernetes.io/projected/0728065b-632b-42ee-a802-5508cabf2293-kube-api-access-qtcx6\") pod \"0728065b-632b-42ee-a802-5508cabf2293\" (UID: \"0728065b-632b-42ee-a802-5508cabf2293\") " Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.474753 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7w9zs\" (UniqueName: \"kubernetes.io/projected/ebc1bb1a-b115-43ff-a381-e49465f3df53-kube-api-access-7w9zs\") on node \"crc\" DevicePath \"\"" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.474773 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc1bb1a-b115-43ff-a381-e49465f3df53-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.474781 4591 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.476902 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0728065b-632b-42ee-a802-5508cabf2293-kube-api-access-qtcx6" (OuterVolumeSpecName: "kube-api-access-qtcx6") pod "0728065b-632b-42ee-a802-5508cabf2293" (UID: "0728065b-632b-42ee-a802-5508cabf2293"). InnerVolumeSpecName "kube-api-access-qtcx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.486785 4591 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0728065b-632b-42ee-a802-5508cabf2293" (UID: "0728065b-632b-42ee-a802-5508cabf2293"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.518467 4591 generic.go:334] "Generic (PLEG): container finished" podID="0728065b-632b-42ee-a802-5508cabf2293" containerID="8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6" exitCode=0 Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.518522 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sswph" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.518525 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sswph" event={"ID":"0728065b-632b-42ee-a802-5508cabf2293","Type":"ContainerDied","Data":"8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6"} Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.518637 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sswph" event={"ID":"0728065b-632b-42ee-a802-5508cabf2293","Type":"ContainerDied","Data":"eb10151d689b590dc97d21dd9ecec0397094bcd849fc46e51db94c3624252f60"} Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.518655 4591 scope.go:117] "RemoveContainer" containerID="8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.523221 4591 generic.go:334] "Generic (PLEG): container finished" podID="ebc1bb1a-b115-43ff-a381-e49465f3df53" containerID="f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d" exitCode=0 Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.523253 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68knk" event={"ID":"ebc1bb1a-b115-43ff-a381-e49465f3df53","Type":"ContainerDied","Data":"f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d"} Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.523372 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-68knk" event={"ID":"ebc1bb1a-b115-43ff-a381-e49465f3df53","Type":"ContainerDied","Data":"4e0ad85da08282b509f93e9f4eb26a22051bf9421cafe86391cb876eaa0feb8a"} Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.523264 4591 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-68knk" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.534350 4591 scope.go:117] "RemoveContainer" containerID="dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.548951 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sswph"] Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.560338 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sswph"] Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.562751 4591 scope.go:117] "RemoveContainer" containerID="e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.563125 4591 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-68knk"] Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.567416 4591 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-68knk"] Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.575942 4591 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtcx6\" (UniqueName: \"kubernetes.io/projected/0728065b-632b-42ee-a802-5508cabf2293-kube-api-access-qtcx6\") on node \"crc\" DevicePath \"\"" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.575965 4591 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0728065b-632b-42ee-a802-5508cabf2293-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.576611 4591 scope.go:117] "RemoveContainer" containerID="8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6" Dec 03 12:39:16 crc kubenswrapper[4591]: E1203 12:39:16.576989 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6\": container with ID starting with 8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6 not found: ID does not exist" containerID="8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.577019 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6"} err="failed to get container status \"8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6\": rpc error: code = NotFound desc = could not find container \"8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6\": container with ID starting with 8f3d7487cfc77161807f030c6afc90933268d30280634506dcd54bee1ef768b6 not found: ID does not exist" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.577038 4591 scope.go:117] "RemoveContainer" containerID="dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3" Dec 03 12:39:16 crc kubenswrapper[4591]: E1203 12:39:16.577454 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3\": container with ID starting with dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3 not found: ID does not exist" 
containerID="dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.577481 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3"} err="failed to get container status \"dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3\": rpc error: code = NotFound desc = could not find container \"dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3\": container with ID starting with dc1b4dbad7469e2484372b2d6bcd8ff2cc40e66b042e96dc9cc6658be550bfa3 not found: ID does not exist" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.577502 4591 scope.go:117] "RemoveContainer" containerID="e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93" Dec 03 12:39:16 crc kubenswrapper[4591]: E1203 12:39:16.577812 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93\": container with ID starting with e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93 not found: ID does not exist" containerID="e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.577837 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93"} err="failed to get container status \"e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93\": rpc error: code = NotFound desc = could not find container \"e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93\": container with ID starting with e7d10ccad002b6c800c28a01af82df4eb9c9ee81c33ee0176a1186477759da93 not found: ID does not exist" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.577855 4591 scope.go:117] "RemoveContainer" containerID="f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.589622 4591 scope.go:117] "RemoveContainer" containerID="2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.604712 4591 scope.go:117] "RemoveContainer" containerID="3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.618328 4591 scope.go:117] "RemoveContainer" containerID="f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d" Dec 03 12:39:16 crc kubenswrapper[4591]: E1203 12:39:16.618555 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d\": container with ID starting with f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d not found: ID does not exist" containerID="f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.618585 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d"} err="failed to get container status \"f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d\": rpc error: code = NotFound desc = could not find container 
\"f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d\": container with ID starting with f02c1c538ecbad13ba3340b616d482f5e90c3d7b3531f8c0d33313618ae58c5d not found: ID does not exist" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.618608 4591 scope.go:117] "RemoveContainer" containerID="2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a" Dec 03 12:39:16 crc kubenswrapper[4591]: E1203 12:39:16.618800 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a\": container with ID starting with 2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a not found: ID does not exist" containerID="2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.618825 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a"} err="failed to get container status \"2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a\": rpc error: code = NotFound desc = could not find container \"2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a\": container with ID starting with 2c2286671b7d171d15d50a0db20fc2103302957eecd849c604b28ed921d74e7a not found: ID does not exist" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.618840 4591 scope.go:117] "RemoveContainer" containerID="3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6" Dec 03 12:39:16 crc kubenswrapper[4591]: E1203 12:39:16.619014 4591 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6\": container with ID starting with 3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6 not found: ID does not exist" containerID="3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.619034 4591 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6"} err="failed to get container status \"3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6\": rpc error: code = NotFound desc = could not find container \"3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6\": container with ID starting with 3bb23913dfed2c56f4922820d153b009e93df927d5338c4a316eea26a05165e6 not found: ID does not exist" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.897621 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0728065b-632b-42ee-a802-5508cabf2293" path="/var/lib/kubelet/pods/0728065b-632b-42ee-a802-5508cabf2293/volumes" Dec 03 12:39:16 crc kubenswrapper[4591]: I1203 12:39:16.898280 4591 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebc1bb1a-b115-43ff-a381-e49465f3df53" path="/var/lib/kubelet/pods/ebc1bb1a-b115-43ff-a381-e49465f3df53/volumes" Dec 03 12:39:21 crc kubenswrapper[4591]: E1203 12:39:21.892606 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" 
pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:39:25 crc kubenswrapper[4591]: I1203 12:39:25.299822 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:39:25 crc kubenswrapper[4591]: I1203 12:39:25.300132 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:39:32 crc kubenswrapper[4591]: E1203 12:39:32.895225 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:39:45 crc kubenswrapper[4591]: E1203 12:39:45.891894 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:39:55 crc kubenswrapper[4591]: I1203 12:39:55.300092 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:39:55 crc kubenswrapper[4591]: I1203 12:39:55.300465 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:39:59 crc kubenswrapper[4591]: E1203 12:39:59.892516 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:40:14 crc kubenswrapper[4591]: E1203 12:40:14.891360 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.299219 4591 patch_prober.go:28] interesting pod/machine-config-daemon-mnzzd container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.299649 4591 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.299690 4591 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.300191 4591 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03886246aa34fbd0ea08c4d11f4512fd367a31ef76fe41b6e64d6fee323786aa"} pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.300244 4591 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerName="machine-config-daemon" containerID="cri-o://03886246aa34fbd0ea08c4d11f4512fd367a31ef76fe41b6e64d6fee323786aa" gracePeriod=600 Dec 03 12:40:25 crc kubenswrapper[4591]: E1203 12:40:25.421929 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:40:25 crc kubenswrapper[4591]: E1203 12:40:25.892230 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3" Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.949147 4591 generic.go:334] "Generic (PLEG): container finished" podID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" containerID="03886246aa34fbd0ea08c4d11f4512fd367a31ef76fe41b6e64d6fee323786aa" exitCode=0 Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.949183 4591 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" event={"ID":"96827b8d-1081-4acd-a2db-c2fa3a87b42a","Type":"ContainerDied","Data":"03886246aa34fbd0ea08c4d11f4512fd367a31ef76fe41b6e64d6fee323786aa"} Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.949211 4591 scope.go:117] "RemoveContainer" containerID="1d5a89c7a12db5e1168ad0d117dc3c7980e276a04d5ca9cb2411eeb281205cc0" Dec 03 12:40:25 crc kubenswrapper[4591]: I1203 12:40:25.949899 4591 scope.go:117] "RemoveContainer" containerID="03886246aa34fbd0ea08c4d11f4512fd367a31ef76fe41b6e64d6fee323786aa" Dec 03 12:40:25 crc kubenswrapper[4591]: E1203 
12:40:25.950168 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mnzzd_openshift-machine-config-operator(96827b8d-1081-4acd-a2db-c2fa3a87b42a)\"" pod="openshift-machine-config-operator/machine-config-daemon-mnzzd" podUID="96827b8d-1081-4acd-a2db-c2fa3a87b42a" Dec 03 12:40:36 crc kubenswrapper[4591]: E1203 12:40:36.893156 4591 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.13:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-v5fln" podUID="9b97239d-8d0f-443b-bc87-4c56b518bab3"